[ 495.244308] env[61573]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61573) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.244734] env[61573]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61573) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.244734] env[61573]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61573) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.245101] env[61573]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 495.346205] env[61573]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61573) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 495.358836] env[61573]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.013s {{(pid=61573) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 495.502319] env[61573]: INFO nova.virt.driver [None req-d8b26512-4118-4045-b747-082f3e91feba None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 495.579958] env[61573]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 495.580158] env[61573]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 495.580247] env[61573]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61573) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 498.722309] env[61573]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f782ed84-def1-4428-b787-93890d120ae7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.738191] env[61573]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61573) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 498.738353] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2666a0ed-feff-4286-a511-4684deaa43f1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.771772] env[61573]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 53c5f.
[ 498.772034] env[61573]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.192s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.772516] env[61573]: INFO nova.virt.vmwareapi.driver [None req-d8b26512-4118-4045-b747-082f3e91feba None None] VMware vCenter version: 7.0.3
[ 498.776052] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d282506-7792-45a3-bb76-e77db790fbf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.798772] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb11f0b6-1d51-4bd5-ac51-20c8d056a86f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.805724] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29243112-982a-4b63-82fe-745a609c8473 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.812870] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2475f2a-a057-4d95-9be3-22b4ae62b938 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.825992] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee33959-a200-45c5-9052-718faff3a2da {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.832178] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fe76f2-60d9-42c6-aa53-df53d30de971 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.862562] env[61573]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-24741a90-7d5a-4957-9e0d-a9bef7e3f413 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.868355] env[61573]: DEBUG nova.virt.vmwareapi.driver [None req-d8b26512-4118-4045-b747-082f3e91feba None None] Extension org.openstack.compute already exists. {{(pid=61573) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 498.871070] env[61573]: INFO nova.compute.provider_config [None req-d8b26512-4118-4045-b747-082f3e91feba None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 498.893182] env[61573]: DEBUG nova.context [None req-d8b26512-4118-4045-b747-082f3e91feba None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),da4674da-6bf8-4efe-b826-cd13a15d174e(cell1) {{(pid=61573) load_cells /opt/stack/nova/nova/context.py:464}}
[ 498.895402] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.895666] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.896380] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.896854] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Acquiring lock "da4674da-6bf8-4efe-b826-cd13a15d174e" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.897072] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Lock "da4674da-6bf8-4efe-b826-cd13a15d174e" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.898138] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Lock "da4674da-6bf8-4efe-b826-cd13a15d174e" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.920095] env[61573]: INFO dbcounter [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Registered counter for database nova_cell0
[ 498.928633] env[61573]: INFO dbcounter [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Registered counter for database nova_cell1
[ 498.932085] env[61573]: DEBUG oslo_db.sqlalchemy.engines [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61573) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.932684] env[61573]: DEBUG oslo_db.sqlalchemy.engines [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61573) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.937049] env[61573]: DEBUG dbcounter [-] [61573] Writer thread running {{(pid=61573) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.938353] env[61573]: DEBUG dbcounter [-] [61573] Writer thread running {{(pid=61573) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.940506] env[61573]: ERROR nova.db.main.api [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.940506] env[61573]: result = function(*args, **kwargs)
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.940506] env[61573]: return func(*args, **kwargs)
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.940506] env[61573]: result = fn(*args, **kwargs)
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.940506] env[61573]: return f(*args, **kwargs)
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.940506] env[61573]: return db.service_get_minimum_version(context, binaries)
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.940506] env[61573]: _check_db_access()
[ 498.940506] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.940506] env[61573]: stacktrace = ''.join(traceback.format_stack())
[ 498.940506] env[61573]:
[ 498.941602] env[61573]: ERROR nova.db.main.api [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.941602] env[61573]: result = function(*args, **kwargs)
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.941602] env[61573]: return func(*args, **kwargs)
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.941602] env[61573]: result = fn(*args, **kwargs)
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.941602] env[61573]: return f(*args, **kwargs)
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.941602] env[61573]: return db.service_get_minimum_version(context, binaries)
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.941602] env[61573]: _check_db_access()
[ 498.941602] env[61573]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.941602] env[61573]: stacktrace = ''.join(traceback.format_stack())
[ 498.941602] env[61573]:
[ 498.942056] env[61573]: WARNING nova.objects.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Failed to get minimum service version for cell da4674da-6bf8-4efe-b826-cd13a15d174e
[ 498.942148] env[61573]: WARNING nova.objects.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 498.942597] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Acquiring lock "singleton_lock" {{(pid=61573) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 498.942760] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Acquired lock "singleton_lock" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 498.943027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Releasing lock "singleton_lock" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 498.943360] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Full set of CONF: {{(pid=61573) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 498.943529] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ******************************************************************************** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 498.943659] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] Configuration options gathered from: {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 498.943796] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 498.943990] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 498.944132] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ================================================================================ {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 498.944350] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] allow_resize_to_same_host = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.944523] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] arq_binding_timeout = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.944658] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] backdoor_port = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.944788] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] backdoor_socket = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.944957] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] block_device_allocate_retries = 60 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.945142] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] block_device_allocate_retries_interval = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.945319] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cert = self.pem {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.945487] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.945658] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute_monitors = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.945832] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] config_dir = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946016] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] config_drive_format = iso9660 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946171] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946348] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] config_source = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946544] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] console_host = devstack {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946690] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] control_exchange = nova {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.946880] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cpu_allocation_ratio = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947071] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] daemon = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947251] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] debug = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947413] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_access_ip_network_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947585] 
env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_availability_zone = nova {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947746] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_ephemeral_format = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.947909] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_green_pool_size = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.948164] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.948336] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] default_schedule_zone = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.948500] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] disk_allocation_ratio = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.948665] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] enable_new_services = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.948883] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] enabled_apis = ['osapi_compute'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949073] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] enabled_ssl_apis = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949243] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] flat_injected = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949407] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] force_config_drive = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949573] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] force_raw_images = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949742] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 
None None] graceful_shutdown_timeout = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.949922] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] heal_instance_info_cache_interval = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.950172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] host = cpu-1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.950358] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.950524] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.950690] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.950922] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951108] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_build_timeout = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951277] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_delete_interval = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951451] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_format = [instance: %(uuid)s] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951623] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_name_template = instance-%08x {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951792] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_usage_audit = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.951992] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_usage_audit_period = month {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.952199] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.952380] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.952553] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] internal_service_availability_zone = internal {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.952715] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] key = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.952881] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] live_migration_retry_count = 30 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953062] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_config_append = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953239] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953406] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_dir = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953570] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953703] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_options = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.953868] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_rotate_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954054] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_rotate_interval_type = days {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954228] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] log_rotation_type = none {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954362] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954494] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954678] env[61573]: DEBUG 
oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954851] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.954985] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955166] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] long_rpc_timeout = 1800 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955329] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_concurrent_builds = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955492] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_concurrent_live_migrations = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955655] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_concurrent_snapshots = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955818] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_local_block_devices = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.955981] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_logfile_count = 30 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.956155] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] max_logfile_size_mb = 200 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.956318] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] maximum_instance_delete_attempts = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.956488] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metadata_listen = 0.0.0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.956656] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metadata_listen_port = 8775 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.956874] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metadata_workers = 2 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957073] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] migrate_max_retries = -1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957253] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] mkisofs_cmd = genisoimage {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957467] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957604] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] my_ip = 10.180.1.21 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957773] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] network_allocate_retries = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.957955] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.958145] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.958313] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] osapi_compute_listen_port = 8774 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.958484] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] osapi_compute_unique_server_name_scope = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.958655] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] osapi_compute_workers = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.958833] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] password_length = 12 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959014] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] periodic_enable = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959190] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] periodic_fuzzy_delay = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959365] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] pointer_model = usbtablet {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959540] env[61573]: 
DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] preallocate_images = none {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959704] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] publish_errors = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.959839] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] pybasedir = /opt/stack/nova {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960007] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ram_allocation_ratio = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960180] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rate_limit_burst = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960349] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rate_limit_except_level = CRITICAL {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960511] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rate_limit_interval = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960674] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reboot_timeout = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960835] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reclaim_instance_interval = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.960998] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] record = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.961191] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reimage_timeout_per_gb = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.961356] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] report_interval = 120 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.961516] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rescue_timeout = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.961678] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reserved_host_cpus = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.961840] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reserved_host_disk_mb = 0 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962010] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reserved_host_memory_mb = 512 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962182] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] reserved_huge_pages = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962346] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] resize_confirm_window = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962510] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] resize_fs_using_block_device = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962670] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] resume_guests_state_on_host_boot = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.962840] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963012] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] rpc_response_timeout = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963188] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] run_external_periodic_tasks = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963361] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] running_deleted_instance_action = reap {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963523] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963685] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] running_deleted_instance_timeout = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.963849] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler_instance_sync_interval = 120 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964029] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_down_time = 720 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964208] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] servicegroup_driver = db {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964373] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] shelved_offload_time = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964537] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] shelved_poll_interval = 3600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964707] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] shutdown_timeout = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.964874] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] source_is_ipv6 = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965048] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ssl_only = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965308] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965479] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] sync_power_state_interval = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965643] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] sync_power_state_pool_size = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965822] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] syslog_log_facility = LOG_USER {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.965986] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] tempdir = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.966165] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] timeout_nbd = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.966342] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] transport_url = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.966504] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] update_resources_interval = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.966670] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_cow_images = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.966853] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 
None None] use_eventlog = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967042] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_journal = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967213] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_json = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967377] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_rootwrap_daemon = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967539] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_stderr = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967700] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] use_syslog = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.967860] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vcpu_pin_set = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968042] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plugging_is_fatal = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968216] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plugging_timeout = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968384] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] virt_mkfs = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968547] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] volume_usage_poll_interval = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968712] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] watch_log_file = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.968908] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] web = /usr/share/spice-html5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 498.969114] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_concurrency.disable_process_locking = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.969411] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.969596] 
env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.969768] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.969945] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.970137] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.970308] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.970494] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.auth_strategy = keystone {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.970666] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.compute_link_prefix = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.970849] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971040] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.dhcp_domain = novalocal {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971223] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.enable_instance_password = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971394] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.glance_link_prefix = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971564] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971740] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.971908] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
api.instance_list_per_project_cells = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972091] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.list_records_by_skipping_down_cells = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972260] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.local_metadata_per_cell = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972461] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.max_limit = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972609] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.metadata_cache_expiration = 15 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972790] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.neutron_default_tenant_id = default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.972966] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.use_neutron_default_nets = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.973156] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.973337] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.973510] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.973689] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.973867] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_dynamic_targets = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974053] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_jsonfile_path = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974244] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974442] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 
None None] cache.backend = dogpile.cache.memcached {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974613] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.backend_argument = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974788] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.config_prefix = cache.oslo {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.974962] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.dead_timeout = 60.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975147] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.debug_cache_backend = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975313] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.enable_retry_client = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975481] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.enable_socket_keepalive = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975655] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.enabled = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975825] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.enforce_fips_mode = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.975995] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.expiration_time = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.976177] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.hashclient_retry_attempts = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.976350] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.976520] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_dead_retry = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.976684] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_password = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.976883] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61573) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977060] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977236] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_pool_maxsize = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977404] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977571] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_sasl_enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977756] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.977931] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978113] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.memcache_username = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978285] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.proxies = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978449] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_password = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978622] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978824] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.978995] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_server = localhost:6379 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.979188] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_socket_timeout = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.979353] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.redis_username = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.979522] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.retry_attempts = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.979693] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.retry_delay = 0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.979875] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.socket_keepalive_count = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980059] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.socket_keepalive_idle = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980244] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.socket_keepalive_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980408] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.tls_allowed_ciphers = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980568] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.tls_cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980727] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.tls_certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.980895] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.tls_enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981071] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cache.tls_keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981249] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981424] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.auth_type = password {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981587] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981768] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.981932] env[61573]: DEBUG oslo_service.service 
[None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982119] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982292] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.cross_az_attach = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982460] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.debug = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982625] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.endpoint_template = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982794] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.http_retries = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.982961] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.983139] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.983318] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.os_region_name = RegionOne {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.983486] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.983651] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cinder.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.983831] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984007] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.cpu_dedicated_set = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984180] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.cpu_shared_set = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984353] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.image_type_exclude_list = [] {{(pid=61573) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984521] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984689] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.984856] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985036] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985216] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985383] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.resource_provider_association_refresh = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985547] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985713] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.shutdown_retry_interval = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.985897] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986107] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] conductor.workers = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986284] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] console.allowed_origins = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986449] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] console.ssl_ciphers = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986623] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] console.ssl_minimum_version = default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986801] env[61573]: DEBUG oslo_service.service [None 
req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] consoleauth.enforce_session_timeout = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.986998] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] consoleauth.token_ttl = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.987184] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.987346] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.987514] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.987675] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.987838] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988009] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988225] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988386] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988538] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988698] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.988892] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989115] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989240] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.service_name = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989414] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.service_type = accelerator {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989581] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989743] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.989905] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990075] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990264] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990428] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] cyborg.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990612] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.backend = sqlalchemy {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990846] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.connection = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.990949] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.connection_debug = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.991152] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.connection_parameters = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.991347] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.connection_recycle_time = 3600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.991518] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.connection_trace = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.991685] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.db_inc_retry_interval = True {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.991854] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.db_max_retries = 20 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992030] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.db_max_retry_interval = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992202] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.db_retry_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992368] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.max_overflow = 50 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992532] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.max_pool_size = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992696] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.max_retries = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.992869] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993040] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.mysql_wsrep_sync_wait = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993204] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.pool_timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993364] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.retry_interval = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993523] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.slave_connection = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993685] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.sqlite_synchronous = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.993849] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] database.use_db_reconnect = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.994041] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.backend = sqlalchemy {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.994222] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.connection = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.994392] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.connection_debug = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.994565] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.connection_parameters = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.994729] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.connection_recycle_time = 3600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.994893] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.connection_trace = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995071] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.db_inc_retry_interval = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995237] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.db_max_retries = 20 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995401] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.db_max_retry_interval = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995564] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.db_retry_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995728] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.max_overflow = 50 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.995892] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.max_pool_size = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.996068] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.max_retries = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.996245] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.996507] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.996714] 
env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.pool_timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.996898] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.retry_interval = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997090] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.slave_connection = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997253] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] api_database.sqlite_synchronous = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997435] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] devices.enabled_mdev_types = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997617] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997796] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.997966] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ephemeral_storage_encryption.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.998150] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.998327] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.api_servers = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.998498] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.998662] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.998854] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999040] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999207] env[61573]: DEBUG 
oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999373] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.debug = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999543] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.default_trusted_certificate_ids = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999708] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.enable_certificate_validation = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.999894] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.enable_rbd_download = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000083] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000259] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000426] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000590] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000752] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.000921] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.num_retries = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001116] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.rbd_ceph_conf = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001295] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.rbd_connect_timeout = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001470] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.rbd_pool = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001644] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.rbd_user = {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001809] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.001971] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002149] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002322] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.service_type = image {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002490] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002652] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002812] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.002974] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.003170] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.003340] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.verify_glance_signatures = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.003503] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] glance.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.003673] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] guestfs.debug = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.003851] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] mks.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.004240] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.004439] 
env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.manager_interval = 2400 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.004616] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.precache_concurrency = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.004791] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.remove_unused_base_images = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.004967] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.005161] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.005346] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] image_cache.subdirectory_name = _base {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.005525] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.api_max_retries = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.005695] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.api_retry_interval = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.005858] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006035] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.auth_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006205] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006365] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006533] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006702] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.conductor_group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.006864] env[61573]: DEBUG 
oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007037] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007201] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007379] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007543] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007709] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.007872] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008053] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.peer_list = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008221] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008383] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008552] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.serial_console_state_timeout = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008715] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.008930] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.service_type = baremetal {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009119] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.shard = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009292] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.split_loggers = False {{(pid=61573) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009459] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009620] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009859] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.009993] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.010181] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ironic.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.010373] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.010552] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] key_manager.fixed_key = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.010739] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.010906] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.barbican_api_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011083] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.barbican_endpoint = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011260] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.barbican_endpoint_type = public {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011423] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.barbican_region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011585] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011749] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.certfile = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.011918] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012097] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012262] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012426] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.number_of_retries = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012591] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.retry_delay = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012759] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.send_service_user_token = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.012925] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013099] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013265] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.verify_ssl = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013427] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican.verify_ssl_path = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013595] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013763] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.auth_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.013922] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.014095] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
499.014266] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.014429] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.014587] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.014750] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.014907] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] barbican_service_user.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015090] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.approle_role_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015257] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.approle_secret_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015418] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015581] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015747] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.015913] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016090] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016270] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.kv_mountpoint = secret {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016433] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.kv_path = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016602] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None 
None] vault.kv_version = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016774] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.namespace = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.016931] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.root_token_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017120] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017304] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.ssl_ca_crt_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017450] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017615] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.use_ssl = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017793] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.017966] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.018150] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.auth_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.018315] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.018478] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.018645] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.018837] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.019016] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
499.019195] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.019363] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.019526] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.019687] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.019887] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020105] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020281] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020445] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020620] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.service_type = identity {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020786] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.020950] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.021130] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.021291] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.021476] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.021639] env[61573]: DEBUG oslo_service.service [None 
req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] keystone.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.021847] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.connection_uri = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022026] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_mode = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022202] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022379] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_models = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022555] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_power_governor_high = performance {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022730] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.022921] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_power_management = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.023168] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.023353] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.device_detach_attempts = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.023527] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.device_detach_timeout = 20 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.023701] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.disk_cachemodes = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.023869] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.disk_prefix = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024053] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.enabled_perf_events = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024226] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
libvirt.file_backed_memory = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024395] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.gid_maps = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024557] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.hw_disk_discard = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024726] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.hw_machine_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.024910] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_rbd_ceph_conf = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025096] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025270] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025444] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_rbd_glance_store_name = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025617] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_rbd_pool = rbd {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025792] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_type = default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.025954] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.images_volume_group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026135] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.inject_key = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026303] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.inject_partition = -2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026469] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.inject_password = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026636] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.iscsi_iface = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026803] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.iser_use_multipath = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.026971] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027153] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027320] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_downtime = 500 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027486] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027648] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027811] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_inbound_addr = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.027975] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.028187] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.028382] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_scheme = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.028563] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_timeout_action = abort {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.028736] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_tunnelled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.028933] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.live_migration_uri = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.029125] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.029294] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.max_queues = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.029462] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.029695] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.029864] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.nfs_mount_options = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.030242] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.030576] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.030634] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.030776] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.030944] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.031131] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_pcie_ports = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.031316] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.031488] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.pmem_namespaces = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.031652] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.quobyte_client_cfg = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.031953] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032149] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032318] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032488] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032653] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rbd_secret_uuid = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032815] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rbd_user = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.032983] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033174] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033337] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rescue_image_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033499] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rescue_kernel_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033660] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rescue_ramdisk_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033831] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.033994] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.rx_queue_size = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.034181] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.smbfs_mount_options = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.034460] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.034639] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.snapshot_compression = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.034806] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.snapshot_image_format = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035043] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035222] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.sparse_logical_volumes = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035391] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.swtpm_enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035569] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.swtpm_group = tss {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035744] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.swtpm_user = tss {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.035917] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.sysinfo_serial = unique {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036096] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.tb_cache_size = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036262] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.tx_queue_size = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036433] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.uid_maps = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036600] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.use_virtio_for_bridges = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036777] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.virt_type = kvm {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.036953] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.volume_clear = zero 
{{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037137] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.volume_clear_size = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037310] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.volume_use_multipath = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037527] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_cache_path = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037646] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037819] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.037992] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.038180] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.038458] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.038639] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.vzstorage_mount_user = stack {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.038835] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039024] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039209] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.auth_type = password {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039375] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039540] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.certfile = None 
{{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039710] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.039913] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040113] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040299] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.default_floating_pool = public {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040465] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040632] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.extension_sync_interval = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040797] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.http_retries = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.040966] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041143] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041307] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041482] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041645] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041821] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.ovs_bridge = br-int {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.041995] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.physnets = [] {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.042182] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.region_name = RegionOne {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.042349] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.042524] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.service_metadata_proxy = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.042690] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.042864] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.service_type = network {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043045] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043214] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043377] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043538] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043723] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.043899] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] neutron.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044095] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] notifications.bdms_in_notifications = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044282] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] notifications.default_level = INFO {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044465] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] notifications.notification_format = unversioned {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044633] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] notifications.notify_on_state_change = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044815] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.044995] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] pci.alias = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.045185] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] pci.device_spec = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.045353] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] pci.report_in_placement = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.045527] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.045703] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.auth_type = password {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.045875] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046050] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046219] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046385] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046548] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046707] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.046867] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.default_domain_id = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047040] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.default_domain_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047203] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.domain_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047363] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.domain_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047523] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047689] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.047850] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048102] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048181] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048347] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.password = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048507] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.project_domain_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048674] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.project_domain_name = Default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.048869] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.project_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049070] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.project_name = service {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049253] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.region_name = RegionOne {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049419] 
env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049582] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049755] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.service_type = placement {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.049975] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050175] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050344] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050506] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.system_scope = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050666] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050831] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.trust_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.050997] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.user_domain_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.051187] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.user_domain_name = Default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.051351] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.user_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.051529] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.username = placement {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.051714] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.051879] env[61573]: DEBUG oslo_service.service [None 
req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] placement.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052072] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.cores = 20 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052244] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.count_usage_from_placement = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052420] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052599] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.injected_file_content_bytes = 10240 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052769] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.injected_file_path_length = 255 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.052937] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.injected_files = 5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053124] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.instances = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053295] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.key_pairs = 100 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053466] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.metadata_items = 128 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053635] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.ram = 51200 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053859] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.recheck_quota = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.053967] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.server_group_members = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.054151] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] quota.server_groups = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.054327] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.054522] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.054839] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.image_metadata_prefilter = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.054900] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055049] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.max_attempts = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055222] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.max_placement_results = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055387] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055553] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055716] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.055893] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] scheduler.workers = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056086] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056265] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056448] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056625] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056794] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.056960] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.057142] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.057337] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.057506] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.host_subset_size = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.057680] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.057838] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058010] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058188] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.isolated_hosts = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058356] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.isolated_images = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058523] env[61573]: DEBUG oslo_service.service [None 
req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058690] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.058898] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.pci_in_placement = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060172] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060414] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060414] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.track_instance_changes = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060466] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060596] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metrics.required = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060763] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metrics.weight_multiplier = 1.0 
{{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.060932] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.061113] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] metrics.weight_setting = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.061434] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.061616] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.061800] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.port_range = 10000:20000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.061977] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.062167] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.062336] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] serial_console.serialproxy_port = 6083 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.062504] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.062679] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.auth_type = password {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.062843] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063019] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063189] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063355] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.insecure = False {{(pid=61573) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063517] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063692] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.send_service_user_token = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.063857] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.064030] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] service_user.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.064222] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.agent_enabled = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.064387] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.064707] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.064908] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065096] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.html5proxy_port = 6082 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065265] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.image_compression = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065432] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.jpeg_compression = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065595] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.playback_compression = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065768] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.server_listen = 127.0.0.1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.065941] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066117] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.streaming_mode = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066280] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] spice.zlib_compression = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066448] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] upgrade_levels.baseapi = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066624] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] upgrade_levels.compute = auto {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066789] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] upgrade_levels.conductor = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.066951] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] upgrade_levels.scheduler = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.067138] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.067305] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.067551] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.067873] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.068121] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.068348] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.068526] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.068698] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.068928] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vendordata_dynamic_auth.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.069246] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.api_retry_count = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.069519] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.ca_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.069727] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.069932] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.cluster_name = testcl1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.070126] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.connection_pool_size = 10 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.070294] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.console_delay_seconds = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.070472] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.datastore_regex = ^datastore.* {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.070691] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.070872] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.host_password = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071068] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.host_port = 443 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071272] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.host_username = administrator@vsphere.local {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071450] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.insecure = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071615] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.integration_bridge = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071785] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.maximum_objects = 100 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.071952] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.pbm_default_policy = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072139] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.pbm_enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072306] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.pbm_wsdl_location = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072482] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072652] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.serial_port_proxy_uri = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072813] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.serial_port_service_uri = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.072986] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.task_poll_interval = 0.5 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.073182] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.use_linked_clone = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.073357] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.vnc_keymap = en-us {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.073526] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.vnc_port = 5900 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.073693] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vmware.vnc_port_total = 10000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.073884] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.auth_schemes = ['none'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.074077] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.074415] env[61573]: 
DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.074610] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.074789] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.novncproxy_port = 6080 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.074971] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.server_listen = 127.0.0.1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.075167] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.075332] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.vencrypt_ca_certs = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.075496] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.vencrypt_client_cert = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.075658] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vnc.vencrypt_client_key = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.075840] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076015] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_deep_image_inspection = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076214] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076373] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076539] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076715] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.disable_rootwrap = False {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.076904] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.enable_numa_live_migration = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077101] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077263] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077426] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077590] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.libvirt_disable_apic = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077755] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.077923] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078128] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078272] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078437] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078600] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078762] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.078955] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
499.079143] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.079315] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.079502] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.079677] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.client_socket_timeout = 900 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.079873] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.default_pool_size = 1000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080070] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.keep_alive = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080260] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.max_header_line = 16384 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080430] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080595] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.ssl_ca_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080757] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.ssl_cert_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.080919] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.ssl_key_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.081101] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.tcp_keepidle = 600 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.081284] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.081454] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] zvm.ca_file = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.081617] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] zvm.cloud_connector_url = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.081914] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.082104] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] zvm.reachable_timeout = 300 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.082291] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.enforce_new_defaults = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.082465] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.enforce_scope = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.082645] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.policy_default_rule = default {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.082854] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083060] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.policy_file = policy.yaml {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083244] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083409] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083571] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083733] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.083897] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084084] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084265] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084446] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.connection_string = messaging:// {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084616] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.enabled = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084790] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.es_doc_type = notification {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.084957] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.es_scroll_size = 10000 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.085144] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.es_scroll_time = 2m {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.085310] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.filter_error_trace = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.085480] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.hmac_keys = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.085649] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.sentinel_service_name = mymaster {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.085837] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.socket_timeout = 0.1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086036] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.trace_requests = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086210] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler.trace_sqlalchemy = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086393] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler_jaeger.process_tags = {} {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086560] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086729] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] profiler_otlp.service_name_prefix = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.086897] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] remote_debug.host = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087073] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] remote_debug.port = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087261] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087426] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087591] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087758] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.087924] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088151] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088270] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088436] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088600] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088775] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.088968] env[61573]: 
DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.089208] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.089390] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.089567] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.089745] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.089922] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090100] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090283] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090451] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090618] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090788] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.090957] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091142] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091314] env[61573]: DEBUG oslo_service.service [None 
req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091479] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091643] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091804] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.091998] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.092191] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.092365] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.092542] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.092717] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.092883] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093075] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093253] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093423] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093615] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093790] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_notifications.retry = -1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.093975] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.094171] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.094350] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.auth_section = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.094518] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.auth_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.094679] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.cafile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.094840] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.certfile = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095052] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.collect_timing = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095227] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.connect_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095393] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.connect_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095556] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.endpoint_id = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095719] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.endpoint_override = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.095886] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.insecure = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096060] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.keyfile = None {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096226] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.max_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096387] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.min_version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096548] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.region_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096712] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.retriable_status_codes = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.096872] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.service_name = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097045] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.service_type = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097219] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.split_loggers = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097374] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.status_code_retries = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097534] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.status_code_retry_delay = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097694] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.timeout = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.097856] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.valid_interfaces = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098058] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_limit.version = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098261] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_reports.file_event_handler = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098403] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61573) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098566] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] oslo_reports.log_dir = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098741] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.098933] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.099122] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.099299] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.099469] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.099634] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.099825] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100016] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100188] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100359] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100525] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100687] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] vif_plug_ovs_privileged.user = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.100862] env[61573]: DEBUG oslo_service.service 
[None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101092] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101284] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101462] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101639] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101812] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.101982] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.102162] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.102352] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.102526] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.isolate_vif = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.102702] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.102875] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.103061] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.103240] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
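Editor's note: the long "option = value" dump above is produced by oslo.config's option logging at service startup (the `log_opt_values` frames in each record). Below is a minimal, self-contained sketch of that mechanism; the group name `os_vif_ovs_demo` and the two options are hypothetical stand-ins, not the real os-vif/os-brick option definitions.

```python
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

# Hypothetical option group that mimics the shape of the dump above;
# the real groups (os_vif_ovs, os_brick, privsep_osbrick, ...) are
# registered by their respective libraries, not here.
demo_opts = [
    cfg.IntOpt('network_device_mtu', default=1500,
               help='MTU to set on created devices (illustrative).'),
    cfg.BoolOpt('isolate_vif', default=False,
                help='Whether to isolate the VIF (illustrative).'),
]

CONF = cfg.CONF
CONF.register_opts(demo_opts, group='os_vif_ovs_demo')

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([], project='demo')
    # This call walks every registered option and emits one DEBUG line
    # per "group.option = value", which is exactly what fills the log
    # section above at service start.
    CONF.log_opt_values(LOG, logging.DEBUG)
```

Running the sketch prints a banner plus one DEBUG line per registered option, mirroring the pattern of the dump in the log.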
[ 499.103409] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_vif_ovs.per_port_bridge = False {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.103580] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_brick.lock_path = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.103748] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.103915] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104129] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.capabilities = [21] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104300] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104462] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.helper_command = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104630] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104796] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.104960] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] privsep_osbrick.user = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.105155] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.105323] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.group = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.105479] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.helper_command = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.105646] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
499.105811] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.105976] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] nova_sys_admin.user = None {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 499.106120] env[61573]: DEBUG oslo_service.service [None req-c650bc5b-3428-4c6c-86b6-423ce110f161 None None] ******************************************************************************** {{(pid=61573) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 499.106923] env[61573]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 499.117098] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Getting list of instances from cluster (obj){ [ 499.117098] env[61573]: value = "domain-c8" [ 499.117098] env[61573]: _type = "ClusterComputeResource" [ 499.117098] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 499.118415] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799e2b14-7c82-4c18-8dfe-a8dc256caa8f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.127937] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Got total of 0 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 499.128507] env[61573]: WARNING nova.virt.vmwareapi.driver [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 499.129026] env[61573]: INFO nova.virt.node [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Generated node identity b1eff98b-2b30-4574-a87d-d151235a2dba [ 499.129263] env[61573]: INFO nova.virt.node [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Wrote node identity b1eff98b-2b30-4574-a87d-d151235a2dba to /opt/stack/data/n-cpu-1/compute_id [ 499.143269] env[61573]: WARNING nova.compute.manager [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Compute nodes ['b1eff98b-2b30-4574-a87d-d151235a2dba'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 499.179360] env[61573]: INFO nova.compute.manager [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 499.204854] env[61573]: WARNING nova.compute.manager [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
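Editor's note: the "Acquiring lock ... / Lock ... acquired ... / Lock ... released" DEBUG triplets that follow (for `compute_resources`) come from oslo.concurrency's lock helpers, whose `inner` wrapper logs each acquire/release with the wait and hold times. A stand-alone sketch of the same pattern is below; the prefix `demo-` and the function names are illustrative, not Nova's actual resource-tracker code.

```python
from oslo_concurrency import lockutils

# Decorator factory with a lock-name prefix, as libraries commonly use.
synchronized = lockutils.synchronized_with_prefix('demo-')


@synchronized('compute_resources')
def update_available_resource():
    # Body runs with the in-process lock held; the acquire/acquired/
    # released DEBUG lines in the log are emitted by the inner()
    # wrapper in oslo_concurrency/lockutils.py around calls like this.
    pass


def do_claim():
    # Equivalent context-manager form for ad-hoc critical sections.
    with lockutils.lock('compute_resources', lock_file_prefix='demo-'):
        pass


if __name__ == '__main__':
    update_available_resource()
    do_claim()
```

With debug logging enabled, each call produces the same waited/held timing lines seen throughout this section of the log.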
[ 499.205102] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 499.205328] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 499.205479] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.205628] env[61573]: DEBUG nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 499.206744] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179ab55c-5a63-4e35-a868-7d2ac2cebc95 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.215459] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26efa9e3-4fbb-4a32-9f72-d7849203874f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.230428] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f78262-91e2-40de-86a4-c00f8fbbc4ed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.237438] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2e05d6-14ed-4716-9117-121b1eac61a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.268181] env[61573]: DEBUG nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180552MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 499.268181] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 499.268181] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 499.280541] env[61573]: WARNING 
nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] No compute node record for cpu-1:b1eff98b-2b30-4574-a87d-d151235a2dba: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host b1eff98b-2b30-4574-a87d-d151235a2dba could not be found. [ 499.293580] env[61573]: INFO nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: b1eff98b-2b30-4574-a87d-d151235a2dba [ 499.348531] env[61573]: DEBUG nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 499.348713] env[61573]: DEBUG nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 499.455021] env[61573]: INFO nova.scheduler.client.report [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] [req-bb7c6564-1d73-43a7-a8db-471867bd9a74] Created resource provider record via placement API for resource provider with UUID b1eff98b-2b30-4574-a87d-d151235a2dba and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 499.473047] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067d0378-a18b-4954-b5b4-6b2edfaf4096 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.479821] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592f3147-5eed-4447-954d-1fc2ce4cf075 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.510661] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3230cd63-3b07-412b-99f1-f56644adcd9b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.519167] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44613cea-d9de-4ca6-a77a-97041ca96a79 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.533951] env[61573]: DEBUG nova.compute.provider_tree [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.576262] env[61573]: DEBUG nova.scheduler.client.report [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Updated inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 499.576571] env[61573]: DEBUG nova.compute.provider_tree [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Updating resource provider b1eff98b-2b30-4574-a87d-d151235a2dba generation from 0 to 1 during operation: update_inventory {{(pid=61573) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.576793] env[61573]: DEBUG nova.compute.provider_tree [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.626791] env[61573]: DEBUG nova.compute.provider_tree [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Updating resource provider b1eff98b-2b30-4574-a87d-d151235a2dba generation from 1 to 2 during operation: update_traits {{(pid=61573) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.644989] env[61573]: DEBUG nova.compute.resource_tracker [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 499.645216] env[61573]: DEBUG oslo_concurrency.lockutils [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.377s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.645376] env[61573]: DEBUG nova.service [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Creating RPC server for service compute {{(pid=61573) start /opt/stack/nova/nova/service.py:182}} [ 499.662580] env[61573]: DEBUG nova.service [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] Join ServiceGroup membership for this service compute {{(pid=61573) start /opt/stack/nova/nova/service.py:199}} [ 499.662775] env[61573]: DEBUG nova.servicegroup.drivers.db [None req-2a8f3522-ab68-4297-abfb-da7af644b759 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61573) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 508.938994] env[61573]: DEBUG dbcounter [-] [61573] Writing DB stats nova_cell1:SELECT=1 {{(pid=61573) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 508.941415] env[61573]: DEBUG dbcounter [-] [61573] Writing DB stats nova_cell0:SELECT=1 {{(pid=61573) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 533.664720] env[61573]: DEBUG oslo_service.periodic_task [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.682804] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 533.682804] env[61573]: value = "domain-c8" [ 533.682804] env[61573]: _type = "ClusterComputeResource" [ 533.682804] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 533.683996] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89178f7-1a6a-40a5-b6e2-0285712c7915 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.694186] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 0 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 533.694440] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.694770] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 533.694770] env[61573]: value = "domain-c8" [ 533.694770] env[61573]: _type = "ClusterComputeResource" [ 533.694770] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 533.696095] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e10606-186f-44e6-80f7-8111453e4843 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.704702] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 0 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 545.447650] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "1da85728-bbee-4605-bf03-d49a650c4d1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.448104] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "1da85728-bbee-4605-bf03-d49a650c4d1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.478540] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 545.618933] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.619237] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.622276] env[61573]: INFO nova.compute.claims [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.791236] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8083c7e2-b6c7-4843-8f04-b85123e5ec73 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.800677] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963114ae-5027-4709-93c8-225456ce1397 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.855172] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4cf51e-c238-40d7-b27a-218b121563a1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.864608] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e5e73a-030c-45b4-b92b-b5a983e34d16 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.881435] env[61573]: DEBUG nova.compute.provider_tree [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.897973] env[61573]: DEBUG nova.scheduler.client.report [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.924986] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.925478] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 545.985189] env[61573]: DEBUG nova.compute.utils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.993043] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 545.993043] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.016886] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.170664] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.375857] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.375857] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.375857] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.376461] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.376461] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.376461] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.376554] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.376647] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
546.377026] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.377209] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.377380] env[61573]: DEBUG nova.virt.hardware [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.378348] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd054d8-0715-4f92-ba09-bf627f99257d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.391040] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff9a952-4468-4fa7-aaf2-2b4b4e1412d8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.413970] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a859b738-77fe-4997-85a6-00817c37669d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.564232] env[61573]: DEBUG nova.policy [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe89159c64fa44ab8ee9471df3f082a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '415cc2d7de384f7ca1b8c26e32974978', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.770257] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.770641] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.785944] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 547.859673] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.860776] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.861451] env[61573]: INFO nova.compute.claims [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.998188] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792f7cc3-ccea-4270-8501-5e79785754d3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.006375] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a085f035-5323-45e9-8325-db6906bb6963 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.044673] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e0a761-e422-4ee5-b567-09a036f043b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.052412] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c238d4-5ac3-41ea-ad84-2382134840df {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.069449] env[61573]: DEBUG nova.compute.provider_tree [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.082013] env[61573]: DEBUG nova.scheduler.client.report [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 548.098852] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.239s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.099645] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 548.136662] env[61573]: DEBUG nova.compute.utils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 548.138412] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 548.138726] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 548.149511] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 548.225987] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 548.254696] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.254854] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.254971] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.255171] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.255318] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.255510] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.255743] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.255903] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 548.256092] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.256321] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.256481] env[61573]: DEBUG nova.virt.hardware [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.257327] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7451049-b5f2-40f2-8bf4-490bf2088b23 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.266687] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f61b118-df1d-4658-bac3-463eed92682e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.357351] env[61573]: DEBUG nova.policy [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2a47eb8dbb14bf699ded0e0b2dbe157', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fde71aa69dbe431bb75848f76999b7d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 548.605544] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Successfully created port: 9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.044546] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.044956] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.071726] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 549.155750] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.155983] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.157997] env[61573]: INFO nova.compute.claims [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.344114] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad06529a-57c0-4123-8d34-de14687022b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.353793] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32c4eab-7933-4d7e-ac69-6ed1c1d325b7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.396320] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235bf29d-cd44-4da6-8f48-ff1cb263431d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.408284] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bae6c0-52fa-4e36-9db6-ed581c37d14d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.424951] env[61573]: DEBUG nova.compute.provider_tree [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.445880] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "933ffe74-6883-4676-9be8-f12e45be35e0" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.445880] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "933ffe74-6883-4676-9be8-f12e45be35e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.445880] env[61573]: DEBUG nova.scheduler.client.report [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.460961] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 549.481930] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.482587] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 549.548287] env[61573]: DEBUG nova.compute.utils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.549606] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Allocating IP information in the background. 
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 549.549772] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.560206] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.564017] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.564017] env[61573]: INFO nova.compute.claims [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.576157] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 549.709850] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 549.745876] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.745876] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.745876] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.747273] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.747340] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.747730] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.747973] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.748154] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.748324] env[61573]: DEBUG nova.virt.hardware [None 
req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.748490] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.748665] env[61573]: DEBUG nova.virt.hardware [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.749905] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be461366-f58c-433b-abb2-5d0e549d4507 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.764549] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed862c3d-5d62-40d4-8d4d-31d59a41b45c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.782580] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e497eaa-0c36-43e8-8947-b1d4a7196925 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.790693] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e206b94-a411-41fe-a408-3038d40deb74 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.830029] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54afc685-90ad-4f7b-992b-f98ea424e9da {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.841909] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1548efa0-97c1-4105-852c-be9e48b499cc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.868020] env[61573]: DEBUG nova.compute.provider_tree [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.878633] env[61573]: DEBUG nova.scheduler.client.report [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.897130] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.336s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.898048] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 549.947080] env[61573]: DEBUG nova.compute.utils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.949555] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 549.949798] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.965679] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 550.037295] env[61573]: DEBUG nova.policy [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85502da2f65e43649a3bd28659057c40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03ab001ffb3148e7bdb7b1d4dfa3ea88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 550.089404] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 550.119917] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.120181] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.120339] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.120519] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.120828] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 
tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.120939] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.121157] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.121321] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.121711] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.121711] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.121815] env[61573]: DEBUG nova.virt.hardware [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.122697] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4667dd2-6985-495e-ac0d-94946235d9ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.132535] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275b39ad-6a8b-4129-a8f2-96a58c1f1337 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.150850] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Successfully created port: 6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.289012] env[61573]: DEBUG nova.policy [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 
tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '669b0fba9cd046529709300ba646b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3aabf5c6a1ed4a799a8d230cb0e37107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 551.349214] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.349606] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.382263] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 551.492915] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.493218] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.498756] env[61573]: INFO nova.compute.claims [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.683318] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29648f63-7c97-4dbe-8566-c13d422bd440 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.697525] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cde32f-add9-47b2-8f4f-a06f52da0304 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.735092] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23a2d1b-102d-490f-96f1-ed09532ede14 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.744707] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a108ce5-2f0b-466c-91a4-e42ef39072a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.762883] env[61573]: DEBUG nova.compute.provider_tree [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.776546] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.799951] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 
tempest-ImagesTestJSON-628715491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.800950] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 551.853240] env[61573]: DEBUG nova.compute.utils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.855110] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 551.855110] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.867536] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 551.945583] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 551.983658] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.984031] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.984116] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.984302] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.984536] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.984764] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.985227] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.985659] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.985987] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Got 1 possible 
topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.986278] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.986565] env[61573]: DEBUG nova.virt.hardware [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.988659] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc3de8d-9ec0-4bb3-a943-4b5b3af7816a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.001021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd81bb04-953e-440b-89c7-302e38193acc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.196849] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Successfully created port: 2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.260667] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Successfully created port: e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.515128] env[61573]: DEBUG nova.policy [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b30dde10b52346608878962716400f6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad80964934624fed809a3285c0f1b748', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 552.781721] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Successfully updated port: 9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.805072] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.805072] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.805072] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.056592] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.686666] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Successfully updated port: 6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.707596] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.707596] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquired lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.707596] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.951278] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.283077] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Updating instance_info_cache with network_info: [{"id": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "address": "fa:16:3e:8e:51:0e", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa249eb-f1", "ovs_interfaceid": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.330153] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.330153] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Instance network_info: |[{"id": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "address": "fa:16:3e:8e:51:0e", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa249eb-f1", "ovs_interfaceid": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.330363] env[61573]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:51:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 554.353698] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.356450] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dedb7ba7-d686-418f-a7d2-8d6bbee7613c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.375155] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created folder: OpenStack in parent group-v4. [ 554.375751] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating folder: Project (415cc2d7de384f7ca1b8c26e32974978). Parent ref: group-v942801. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.379891] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c929f7ea-c0ea-4cb2-9899-f2aba04ae1c7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.392262] env[61573]: DEBUG nova.compute.manager [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Received event network-vif-plugged-9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 554.392609] env[61573]: DEBUG oslo_concurrency.lockutils [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] Acquiring lock "1da85728-bbee-4605-bf03-d49a650c4d1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.392918] env[61573]: DEBUG oslo_concurrency.lockutils [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] Lock "1da85728-bbee-4605-bf03-d49a650c4d1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.392918] env[61573]: DEBUG oslo_concurrency.lockutils [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] Lock "1da85728-bbee-4605-bf03-d49a650c4d1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.393160] env[61573]: DEBUG nova.compute.manager [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] No waiting events found dispatching network-vif-plugged-9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 554.393672] env[61573]: WARNING nova.compute.manager [req-1fffb48a-5851-4dc5-8d8a-c70693c39423 req-5af7fe8d-905c-4f9d-b52b-3cece8b3c8db service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Received unexpected event network-vif-plugged-9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 for instance with vm_state building and task_state spawning. [ 554.396990] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created folder: Project (415cc2d7de384f7ca1b8c26e32974978) in parent group-v942801. [ 554.397885] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating folder: Instances. Parent ref: group-v942802. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.397885] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7529bb6f-9201-4038-9a8f-6f5cf00afc34 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.415132] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created folder: Instances in parent group-v942802. [ 554.415389] env[61573]: DEBUG oslo.service.loopingcall [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.415604] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 554.416434] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd98b81f-e76b-4945-89b6-9b2c17e78942 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.446429] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 554.446429] env[61573]: value = "task-4836705" [ 554.446429] env[61573]: _type = "Task" [ 554.446429] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.457290] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836705, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.724024] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Successfully created port: 840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.955602] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836705, 'name': CreateVM_Task, 'duration_secs': 0.425441} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.955767] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.983489] env[61573]: DEBUG oslo_vmware.service [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e75d712-bf8c-45a6-90f2-468e700b89c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.991609] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.991778] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.992490] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.992763] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36abf4df-ab65-4b33-89c6-fcda464cdc76 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.002827] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 555.002827] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52933dc1-ec12-8020-6ff7-9d77705567c6" [ 555.002827] env[61573]: _type = "Task" [ 555.002827] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.012407] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52933dc1-ec12-8020-6ff7-9d77705567c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.361282] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Updating instance_info_cache with network_info: [{"id": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "address": "fa:16:3e:75:ac:b7", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba7f350-b6", "ovs_interfaceid": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.385306] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Releasing lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.385990] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Instance network_info: |[{"id": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "address": "fa:16:3e:75:ac:b7", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba7f350-b6", "ovs_interfaceid": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
555.386086] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:ac:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.401532] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Creating folder: Project (fde71aa69dbe431bb75848f76999b7d2). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.407876] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b37c4ea5-a874-4eff-80a4-9c1477389bbc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.416597] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.421194] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.421689] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 555.421966] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 555.427033] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Created folder: Project (fde71aa69dbe431bb75848f76999b7d2) in parent group-v942801. [ 555.427033] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Creating folder: Instances. Parent ref: group-v942805. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.427033] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb03a3a3-18ba-4f20-8359-8632aff89e99 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.439232] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Created folder: Instances in parent group-v942805. [ 555.439232] env[61573]: DEBUG oslo.service.loopingcall [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.439232] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.439232] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1fac6f2-a5b4-446f-96b3-5cd170beff24 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.464063] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 555.464389] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 555.464617] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 555.464833] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 555.465080] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 555.465601] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 555.466602] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.466986] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.467314] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.469537] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.469537] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.469537] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.469537] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 555.469537] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.472793] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.472793] env[61573]: value = "task-4836708" [ 555.472793] env[61573]: _type = "Task" [ 555.472793] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.487062] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836708, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.490585] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.490585] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.490585] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.490585] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 555.490585] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f45ece-5193-4ed5-9146-3b9916d015ca {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.499282] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fc0e05-256a-4564-aa94-8af146e2c973 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.527666] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6966dab4-b6de-474e-9f02-39b5c6590d3f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.543694] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.543849] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.543993] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.545246] 
env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.546746] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.546746] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-232dbbdc-0411-4329-b228-2c9e205cb28e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.551782] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5b7db1-1c48-4008-9c15-6509076fb222 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.600981] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180566MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 555.601179] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.601403] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.606484] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.606698] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 555.608337] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deed6090-7281-4ef4-a8cd-768ec117f9b4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.622019] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21197de5-a5da-4b93-8119-2289596f237e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.628699] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 555.628699] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d600f5-6ad0-ad97-9c53-eb99056ba877" [ 555.628699] env[61573]: _type = "Task" [ 555.628699] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.645199] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 555.645541] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating directory with path [datastore2] vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.645868] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-985e4b16-a524-414d-82da-595a5704b5cf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.670914] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created directory with path [datastore2] vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.672570] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Fetch image to [datastore2] vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 555.672570] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] 
vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 555.673689] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafaaae1-0710-4c3f-9cd8-1d617718145f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.687129] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce8f6d2-591a-4799-b4e4-a475b22d2bf4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.705038] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defab771-f7fe-4fa4-b2c1-eb5f68d77994 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.746323] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1da85728-bbee-4605-bf03-d49a650c4d1e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.746624] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.746664] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8725d6e7-57cc-4d55-b21a-4aee65d5228b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.746793] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 933ffe74-6883-4676-9be8-f12e45be35e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.746897] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance da95334f-ebbf-4a7f-8492-ca310028c4dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.747156] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 555.747261] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '5', 'num_vm_building': '5', 'num_task_spawning': '5', 'num_os_type_None': '5', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'io_workload': '5', 'num_proj_fde71aa69dbe431bb75848f76999b7d2': '1', 'num_proj_03ab001ffb3148e7bdb7b1d4dfa3ea88': '1', 'num_proj_3aabf5c6a1ed4a799a8d230cb0e37107': '1', 'num_proj_ad80964934624fed809a3285c0f1b748': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 555.750490] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c09c4-357c-45e7-94e5-f076213c71a4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.759316] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a62b1d1a-eb5e-4e66-8ade-1ea2889a052b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.875404] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 555.884017] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae60176c-168c-4c6e-9852-a2f3d595e3a0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.892127] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d499a668-db1d-4ea0-bdc6-3fe510e4fee4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.929720] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a706b8-0aaa-4232-9e47-4c54c5d96a96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.940480] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e5b02c-5f83-4158-b55a-f42fbfd7a4f3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.967874] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.986166] env[61573]: 
DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.996125] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836708, 'name': CreateVM_Task, 'duration_secs': 0.394273} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.996210] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.996792] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.996943] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.997262] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.998584] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c082aa-62e3-4632-8a43-ff6e8288fc80 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.004415] env[61573]: DEBUG nova.compute.manager [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Received event network-vif-plugged-6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 556.004415] env[61573]: DEBUG oslo_concurrency.lockutils [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] Acquiring lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.004415] env[61573]: DEBUG oslo_concurrency.lockutils [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] 
Lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.004415] env[61573]: DEBUG oslo_concurrency.lockutils [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] Lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.004594] env[61573]: DEBUG nova.compute.manager [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] No waiting events found dispatching network-vif-plugged-6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 556.004594] env[61573]: WARNING nova.compute.manager [req-ef429316-4dea-42a7-9000-6bca9d9c1e30 req-c57040c3-b9a2-4a06-b654-dbdae66ca9ed service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Received unexpected event network-vif-plugged-6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c for instance with vm_state building and task_state spawning. [ 556.007617] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Waiting for the task: (returnval){ [ 556.007617] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f79f29-10e3-4410-cdac-3f886c0b4671" [ 556.007617] env[61573]: _type = "Task" [ 556.007617] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.018884] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f79f29-10e3-4410-cdac-3f886c0b4671, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.020936] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 556.021166] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.420s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.042769] env[61573]: DEBUG oslo_vmware.rw_handles [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 556.116330] env[61573]: DEBUG oslo_vmware.rw_handles [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 556.116330] env[61573]: DEBUG oslo_vmware.rw_handles [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 556.282403] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Successfully updated port: e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 556.299078] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.299431] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquired lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.299681] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.372590] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Successfully updated port: 2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 556.389496] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.389617] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 
tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquired lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.389770] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.524473] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.524557] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.526464] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.557343] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.637763] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.692806] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Updating instance_info_cache with network_info: [{"id": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "address": "fa:16:3e:cd:ab:64", "network": {"id": "afa46ba6-d826-487c-8a2e-b9edcc239bb5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-387261800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3aabf5c6a1ed4a799a8d230cb0e37107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2af8ff4d-df", "ovs_interfaceid": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.715166] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Releasing lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.717585] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Instance network_info: |[{"id": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "address": "fa:16:3e:cd:ab:64", "network": {"id": "afa46ba6-d826-487c-8a2e-b9edcc239bb5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-387261800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3aabf5c6a1ed4a799a8d230cb0e37107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2af8ff4d-df", "ovs_interfaceid": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.718811] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:ab:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2af8ff4d-df9f-4b9d-b494-eaa05007940e', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.730546] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Creating folder: Project (3aabf5c6a1ed4a799a8d230cb0e37107). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.730546] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8e86a42-98d4-43c0-b0ae-5babbc7584fa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.742491] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Created folder: Project (3aabf5c6a1ed4a799a8d230cb0e37107) in parent group-v942801. [ 557.742758] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Creating folder: Instances. Parent ref: group-v942808. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.742995] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-799cf9e6-9561-4844-99ea-980c7ca20277 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.761162] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Created folder: Instances in parent group-v942808. [ 557.761162] env[61573]: DEBUG oslo.service.loopingcall [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.761162] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.761162] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2de6de3-2416-4aae-bb80-754704d8b93c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.789124] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Updating instance_info_cache with network_info: [{"id": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "address": "fa:16:3e:76:22:c0", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape46ab9c3-7f", "ovs_interfaceid": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.797466] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.797466] env[61573]: value = "task-4836711" [ 557.797466] env[61573]: _type = "Task" [ 557.797466] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.807330] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836711, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.818578] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Releasing lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.819735] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance network_info: |[{"id": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "address": "fa:16:3e:76:22:c0", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape46ab9c3-7f", "ovs_interfaceid": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.819919] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:22:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e46ab9c3-7f95-4d65-98fe-9728a8439324', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.830492] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Creating folder: Project (03ab001ffb3148e7bdb7b1d4dfa3ea88). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.830492] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-307dd534-69b8-4510-99d5-26a472c70c74 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.844328] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Created folder: Project (03ab001ffb3148e7bdb7b1d4dfa3ea88) in parent group-v942801. 
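Editor's note: the Folder.CreateFolder / Folder.CreateVM_Task invocations above, followed by the "Waiting for the task", "progress is 0%" and "completed successfully" entries, are the standard oslo.vmware request-then-poll pattern that nova's vmwareapi driver relies on. Below is a minimal sketch of that pattern only, not the actual nova vm_util code; the vCenter endpoint, credentials, managed-object IDs and the bare-bones config spec are placeholders for illustration.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint, credentials and managed-object IDs; in nova these
    # come from nova.conf [vmware] and from earlier PropertyCollector lookups.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    folder_ref = vim_util.get_moref('group-v942811', 'Folder')
    pool_ref = vim_util.get_moref('resgroup-1', 'ResourcePool')

    # Minimal VM config; the real driver builds a much richer spec
    # (vmxnet3 VIFs, disks, extra config) before calling CreateVM_Task.
    spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')
    spec.name = 'example-vm'
    spec.files = session.vim.client.factory.create('ns0:VirtualMachineFileInfo')
    spec.files.vmPathName = '[datastore2]'

    # Start the task, then poll it until it finishes -- the source of the
    # "Waiting for the task" / "progress is 0%" lines in this trace.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=spec, pool=pool_ref)
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result  # managed-object reference of the new VM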
[ 557.844328] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Creating folder: Instances. Parent ref: group-v942811. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.844328] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bf8c7c4-8bad-44b1-bf05-10ebf5a4ffbe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.857795] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Created folder: Instances in parent group-v942811. [ 557.857795] env[61573]: DEBUG oslo.service.loopingcall [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.857795] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.857795] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42b1c7ad-0974-40e3-877c-917dafd1ee59 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.881016] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.881016] env[61573]: value = "task-4836714" [ 557.881016] env[61573]: _type = "Task" [ 557.881016] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.889504] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836714, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.310811] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836711, 'name': CreateVM_Task, 'duration_secs': 0.393471} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.310981] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 558.311724] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.312081] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.312224] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.312488] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4acf6d03-92e7-4ec2-a10f-af5c5fed27c0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.318477] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Waiting for the task: (returnval){ [ 558.318477] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52397265-eb17-3f06-280b-20ca1f4ea129" [ 558.318477] env[61573]: _type = "Task" [ 558.318477] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.328063] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52397265-eb17-3f06-280b-20ca1f4ea129, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.357436] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Successfully updated port: 840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 558.389549] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.389665] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquired lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.389785] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.407423] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836714, 'name': CreateVM_Task, 'duration_secs': 0.366157} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.409634] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 558.410609] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.556154] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.840114] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.841432] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.841791] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.842198] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.842789] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.843205] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311f2ffe-d0ba-4c88-87d6-4ecc924ac4c2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.854034] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for the task: (returnval){ [ 558.854034] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a399ae-7cad-12dd-dbc0-bc735914c59b" [ 558.854034] env[61573]: _type = "Task" [ 558.854034] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.868205] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a399ae-7cad-12dd-dbc0-bc735914c59b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.369056] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.369362] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 559.369596] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.420506] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Updating instance_info_cache with network_info: [{"id": "840c1ae5-5ecf-405c-bb16-04958928252d", "address": "fa:16:3e:46:43:62", "network": {"id": "7856e2ae-70df-4a10-a041-f5bab9f48841", "bridge": "br-int", "label": "tempest-ImagesTestJSON-708762296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad80964934624fed809a3285c0f1b748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840c1ae5-5e", "ovs_interfaceid": "840c1ae5-5ecf-405c-bb16-04958928252d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.442778] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Releasing lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.443938] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance network_info: |[{"id": "840c1ae5-5ecf-405c-bb16-04958928252d", 
"address": "fa:16:3e:46:43:62", "network": {"id": "7856e2ae-70df-4a10-a041-f5bab9f48841", "bridge": "br-int", "label": "tempest-ImagesTestJSON-708762296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad80964934624fed809a3285c0f1b748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840c1ae5-5e", "ovs_interfaceid": "840c1ae5-5ecf-405c-bb16-04958928252d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 559.447844] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:43:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604c9724-b4ef-4393-a76e-eb4a2b510796', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '840c1ae5-5ecf-405c-bb16-04958928252d', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.459825] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Creating folder: Project (ad80964934624fed809a3285c0f1b748). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.465579] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-669f0f16-bbe9-4204-a4fe-ffb23805b5f9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.477163] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Created folder: Project (ad80964934624fed809a3285c0f1b748) in parent group-v942801. [ 559.477163] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Creating folder: Instances. Parent ref: group-v942814. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.477163] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c311f4aa-3beb-4d90-8346-39219d731b89 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.493509] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Created folder: Instances in parent group-v942814. 
[ 559.493509] env[61573]: DEBUG oslo.service.loopingcall [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.493509] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 559.493509] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b07cf62f-ef81-4694-9dd5-d82aa9c09f7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.520185] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.520185] env[61573]: value = "task-4836717" [ 559.520185] env[61573]: _type = "Task" [ 559.520185] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.529633] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836717, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.032690] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836717, 'name': CreateVM_Task, 'duration_secs': 0.42224} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.033169] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 560.034170] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.034341] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.034639] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.035347] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196589f0-c2b1-4779-8ee5-4c49899101ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.042615] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for the task: (returnval){ [ 560.042615] env[61573]: value = 
"session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524bb7fa-7c3a-0961-9793-389f45d620e9" [ 560.042615] env[61573]: _type = "Task" [ 560.042615] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.056894] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524bb7fa-7c3a-0961-9793-389f45d620e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.563225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.563487] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.563873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.785099] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.785413] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.802216] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 560.899133] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.899710] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.906471] env[61573]: INFO nova.compute.claims [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.075579] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.075871] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.111722] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 561.137489] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Received event network-changed-9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 561.137573] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Refreshing instance network info cache due to event network-changed-9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 561.139015] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquiring lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.139015] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquired lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.143294] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Refreshing network info cache for port 9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.224479] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e90d99-ca20-419c-9302-b3927ab19a31 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.228715] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.238020] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe51706-4366-4f22-9237-b9135a29b193 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.273619] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7728f3be-5fe3-4123-87e1-561b328a0b7d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.282517] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e55d6fe-c6ff-4bf0-8bc6-a2794e8b9026 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.299841] env[61573]: DEBUG nova.compute.provider_tree [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.308779] env[61573]: DEBUG nova.scheduler.client.report [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.345971] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.446s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.346524] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 561.354242] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.124s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.357107] env[61573]: INFO nova.compute.claims [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.424704] env[61573]: DEBUG nova.compute.utils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.426484] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 561.426653] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 561.443381] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 561.573100] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 561.612395] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.612643] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.612831] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.613057] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.613179] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.613319] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.613544] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.613681] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.613854] 
env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.614019] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.614252] env[61573]: DEBUG nova.virt.hardware [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.615737] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74deb39f-53f5-47c6-9227-8941423123db {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.627113] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040a43a6-0f47-4c06-908f-909402c7bf87 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.679895] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abae730e-d8f5-4646-8bf4-ac9bc77d428d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.688099] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86603c34-ed82-4189-af62-43e4c8781e7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.728731] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1d31c0-e0ff-4976-a322-b935ebdd41ff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.739026] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1712618-3ad5-4686-b5a6-457736e59387 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.759019] env[61573]: DEBUG nova.compute.provider_tree [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.762334] env[61573]: DEBUG nova.policy [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '833f0e162c404167bdc793aa448fd551', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a9aa4dbe7894ffebca41f816a4eb2da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 561.768614] env[61573]: DEBUG nova.scheduler.client.report [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.793870] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.440s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.793870] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 561.847048] env[61573]: DEBUG nova.compute.utils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.848451] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 561.849534] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 561.859991] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 561.936670] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 561.966317] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.966606] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.966762] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.967418] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.967587] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.967737] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.967998] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.968248] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.968807] env[61573]: DEBUG nova.virt.hardware [None 
req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.968807] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.968941] env[61573]: DEBUG nova.virt.hardware [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.969956] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dde5681-a31d-416d-a2e4-8ce7e16a7889 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.979650] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f9d09f-7de7-4795-b2c8-3bf59525b8a6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.192249] env[61573]: DEBUG nova.compute.manager [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Received event network-changed-6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 562.192485] env[61573]: DEBUG nova.compute.manager [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Refreshing instance network info cache due to event network-changed-6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 562.192624] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Acquiring lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.192655] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Acquired lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.193154] env[61573]: DEBUG nova.network.neutron [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Refreshing network info cache for port 6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.236165] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Updated VIF entry in instance network info cache for port 9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 562.236165] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Updating instance_info_cache with network_info: [{"id": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "address": "fa:16:3e:8e:51:0e", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa249eb-f1", "ovs_interfaceid": "9aa249eb-f12c-4f99-ac05-7fbbfce1d4d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.248293] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Releasing lock "refresh_cache-1da85728-bbee-4605-bf03-d49a650c4d1e" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.248552] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Received event network-vif-plugged-e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 562.248741] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquiring lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.248933] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.249099] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.249301] env[61573]: DEBUG nova.compute.manager 
[req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] No waiting events found dispatching network-vif-plugged-e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.249483] env[61573]: WARNING nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Received unexpected event network-vif-plugged-e46ab9c3-7f95-4d65-98fe-9728a8439324 for instance with vm_state building and task_state spawning. [ 562.249658] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Received event network-vif-plugged-2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 562.249852] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquiring lock "933ffe74-6883-4676-9be8-f12e45be35e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.251036] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Lock "933ffe74-6883-4676-9be8-f12e45be35e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.251036] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Lock "933ffe74-6883-4676-9be8-f12e45be35e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.251036] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] No waiting events found dispatching network-vif-plugged-2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.251036] env[61573]: WARNING nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Received unexpected event network-vif-plugged-2af8ff4d-df9f-4b9d-b494-eaa05007940e for instance with vm_state building and task_state spawning. 
[ 562.251233] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Received event network-changed-e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 562.251233] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Refreshing instance network info cache due to event network-changed-e46ab9c3-7f95-4d65-98fe-9728a8439324. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 562.251233] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquiring lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.251568] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquired lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.251757] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Refreshing network info cache for port e46ab9c3-7f95-4d65-98fe-9728a8439324 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.480987] env[61573]: DEBUG nova.policy [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20cd1b0025fa4d7fb6b1706eeb6825af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '308f1e086ba943b9b9cf27a1da4eda0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 563.440358] env[61573]: DEBUG nova.network.neutron [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Updated VIF entry in instance network info cache for port 6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.440753] env[61573]: DEBUG nova.network.neutron [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Updating instance_info_cache with network_info: [{"id": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "address": "fa:16:3e:75:ac:b7", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba7f350-b6", "ovs_interfaceid": "6ba7f350-b65c-4da4-94e6-41e0e8c1ef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.452629] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Releasing lock "refresh_cache-8725d6e7-57cc-4d55-b21a-4aee65d5228b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.452849] env[61573]: DEBUG nova.compute.manager [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Received event network-vif-plugged-840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 563.453058] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Acquiring lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.453288] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.453426] env[61573]: DEBUG oslo_concurrency.lockutils [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.453859] env[61573]: DEBUG nova.compute.manager 
[req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] No waiting events found dispatching network-vif-plugged-840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 563.453859] env[61573]: WARNING nova.compute.manager [req-f23e035e-b4d2-487b-ae14-e1b6ab5a0122 req-a9896d4a-f8a6-430d-8d45-62bb4ee2d5ea service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Received unexpected event network-vif-plugged-840c1ae5-5ecf-405c-bb16-04958928252d for instance with vm_state building and task_state spawning. [ 563.477289] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Updated VIF entry in instance network info cache for port e46ab9c3-7f95-4d65-98fe-9728a8439324. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.477607] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Updating instance_info_cache with network_info: [{"id": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "address": "fa:16:3e:76:22:c0", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape46ab9c3-7f", "ovs_interfaceid": "e46ab9c3-7f95-4d65-98fe-9728a8439324", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.491220] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Releasing lock "refresh_cache-72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.491546] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Received event network-changed-2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 563.491731] env[61573]: DEBUG nova.compute.manager [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Refreshing instance network info cache due to event network-changed-2af8ff4d-df9f-4b9d-b494-eaa05007940e. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 563.491935] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquiring lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.492089] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Acquired lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.492255] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Refreshing network info cache for port 2af8ff4d-df9f-4b9d-b494-eaa05007940e {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 564.216396] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Successfully created port: d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.346318] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Updated VIF entry in instance network info cache for port 2af8ff4d-df9f-4b9d-b494-eaa05007940e. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 564.348705] env[61573]: DEBUG nova.network.neutron [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Updating instance_info_cache with network_info: [{"id": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "address": "fa:16:3e:cd:ab:64", "network": {"id": "afa46ba6-d826-487c-8a2e-b9edcc239bb5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-387261800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3aabf5c6a1ed4a799a8d230cb0e37107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2af8ff4d-df", "ovs_interfaceid": "2af8ff4d-df9f-4b9d-b494-eaa05007940e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.373045] env[61573]: DEBUG oslo_concurrency.lockutils [req-4487d69a-e0f1-45ae-b009-48c7ec1b9685 req-930d609f-ad7d-4366-9c2c-f58605fbcf45 service nova] Releasing lock "refresh_cache-933ffe74-6883-4676-9be8-f12e45be35e0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.498485] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Successfully created port: b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.470079] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.470079] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.498503] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 566.602419] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.602878] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.608190] env[61573]: INFO nova.compute.claims [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.860021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624427c6-1691-4268-bab8-ec9cfa9a8356 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.869856] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee78525-cf4c-448f-b0d1-3dd10184d854 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.911511] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf58d11-9bd1-4fa5-a38e-dd85271cbeed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.922264] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc1c142-8345-4967-b245-40cdd3dec251 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.944897] env[61573]: DEBUG nova.compute.provider_tree [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.967787] env[61573]: DEBUG nova.scheduler.client.report [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.991252] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.991252] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 567.047736] env[61573]: DEBUG nova.compute.utils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.049885] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 567.051866] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.071603] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 567.205274] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 567.249747] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.250172] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.252917] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.252917] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.252917] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.252917] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.252917] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.253328] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.253328] env[61573]: DEBUG nova.virt.hardware [None 
req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.253328] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.253328] env[61573]: DEBUG nova.virt.hardware [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.254623] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5606147-7526-46bb-bd42-20725f8f38dc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.259740] env[61573]: DEBUG nova.policy [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20cd1b0025fa4d7fb6b1706eeb6825af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '308f1e086ba943b9b9cf27a1da4eda0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 567.269038] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07140f06-6ee8-4cfd-be08-bc54abdb17a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.386782] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Successfully updated port: d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.404384] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.404576] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquired lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.404731] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 
tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.754341] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.854219] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Successfully updated port: b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.870125] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.870465] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.872284] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.023245] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.225600] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Successfully created port: 2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.470473] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Updating instance_info_cache with network_info: [{"id": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "address": "fa:16:3e:ae:bd:6e", "network": {"id": "b9cf39d8-339f-438c-9a5d-b13d172c2f7d", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1640170036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a9aa4dbe7894ffebca41f816a4eb2da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0761f6d-ce", "ovs_interfaceid": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.486081] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Releasing lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.487337] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance network_info: |[{"id": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "address": "fa:16:3e:ae:bd:6e", "network": {"id": "b9cf39d8-339f-438c-9a5d-b13d172c2f7d", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1640170036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a9aa4dbe7894ffebca41f816a4eb2da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", 
"external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0761f6d-ce", "ovs_interfaceid": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 568.488142] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:bd:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0761f6d-ced5-4b2d-9361-b9ea05c9d031', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.503603] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Creating folder: Project (5a9aa4dbe7894ffebca41f816a4eb2da). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.504305] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45c387bd-1713-4ff2-bb22-8093e3731c8c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.517201] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Created folder: Project (5a9aa4dbe7894ffebca41f816a4eb2da) in parent group-v942801. [ 568.517201] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Creating folder: Instances. Parent ref: group-v942817. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.517522] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b03eaee-ea8e-47bb-a6db-4db3f74d7465 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.532742] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Created folder: Instances in parent group-v942817. [ 568.532742] env[61573]: DEBUG oslo.service.loopingcall [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.532742] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 568.532889] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d7dcee8-3449-42fe-83d6-ad9704951ec1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.560228] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.560228] env[61573]: value = "task-4836720" [ 568.560228] env[61573]: _type = "Task" [ 568.560228] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.572027] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836720, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.684372] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Updating instance_info_cache with network_info: [{"id": "b6110c79-8afe-4f2f-868e-74a7351711c0", "address": "fa:16:3e:ef:93:4b", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6110c79-8a", "ovs_interfaceid": "b6110c79-8afe-4f2f-868e-74a7351711c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.702985] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.703360] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Instance network_info: |[{"id": "b6110c79-8afe-4f2f-868e-74a7351711c0", "address": "fa:16:3e:ef:93:4b", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6110c79-8a", "ovs_interfaceid": "b6110c79-8afe-4f2f-868e-74a7351711c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 568.704080] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:93:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6110c79-8afe-4f2f-868e-74a7351711c0', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.724766] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating folder: Project (308f1e086ba943b9b9cf27a1da4eda0a). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.727366] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46d890b4-59d0-43f0-b11d-6f4b3b8b1d61 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.733106] env[61573]: DEBUG nova.compute.manager [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Received event network-changed-840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 568.733106] env[61573]: DEBUG nova.compute.manager [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Refreshing instance network info cache due to event network-changed-840c1ae5-5ecf-405c-bb16-04958928252d. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 568.733228] env[61573]: DEBUG oslo_concurrency.lockutils [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] Acquiring lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.734463] env[61573]: DEBUG oslo_concurrency.lockutils [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] Acquired lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.734463] env[61573]: DEBUG nova.network.neutron [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Refreshing network info cache for port 840c1ae5-5ecf-405c-bb16-04958928252d {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 568.747787] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created folder: Project (308f1e086ba943b9b9cf27a1da4eda0a) in parent group-v942801. [ 568.750655] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating folder: Instances. Parent ref: group-v942820. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.750655] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f28ea828-c017-4d77-8fa4-04d2e563ad63 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.761512] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created folder: Instances in parent group-v942820. [ 568.761824] env[61573]: DEBUG oslo.service.loopingcall [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.762065] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 568.762896] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-712c62f5-592e-48f0-ad07-217c72b6b7ed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.785955] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.785955] env[61573]: value = "task-4836723" [ 568.785955] env[61573]: _type = "Task" [ 568.785955] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.798142] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836723, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.076964] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836720, 'name': CreateVM_Task, 'duration_secs': 0.359749} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.077109] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 569.077815] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.078188] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.078371] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.078617] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-310ff7f7-d2c9-4193-8acc-93673dc56615 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.085834] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for the task: (returnval){ [ 569.085834] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526566cc-ad0f-8b92-efd8-02aa416bc88a" [ 569.085834] env[61573]: _type = "Task" [ 569.085834] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.102546] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526566cc-ad0f-8b92-efd8-02aa416bc88a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.131994] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.132259] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.149048] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 569.224333] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.224393] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.230762] env[61573]: INFO nova.compute.claims [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.306144] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836723, 'name': CreateVM_Task, 'duration_secs': 0.368305} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.306144] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 569.307885] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.310438] env[61573]: DEBUG nova.network.neutron [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Updated VIF entry in instance network info cache for port 840c1ae5-5ecf-405c-bb16-04958928252d. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 569.310783] env[61573]: DEBUG nova.network.neutron [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Updating instance_info_cache with network_info: [{"id": "840c1ae5-5ecf-405c-bb16-04958928252d", "address": "fa:16:3e:46:43:62", "network": {"id": "7856e2ae-70df-4a10-a041-f5bab9f48841", "bridge": "br-int", "label": "tempest-ImagesTestJSON-708762296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad80964934624fed809a3285c0f1b748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840c1ae5-5e", "ovs_interfaceid": "840c1ae5-5ecf-405c-bb16-04958928252d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.332253] env[61573]: DEBUG oslo_concurrency.lockutils [req-b96f4cee-425f-44dd-832e-f85ef9326c45 req-ab176d2f-ef1e-4a05-b8b1-438c282832d2 service nova] Releasing lock "refresh_cache-da95334f-ebbf-4a7f-8492-ca310028c4dd" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.518985] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbaa0919-4abf-4f22-88fb-732a267fa8d2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.528541] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0c981e-bdf0-4880-9401-349a882867c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.568411] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dcaaae-320d-4f6c-bcc7-76863d5317ac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.575448] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4459cc6-5b5b-4454-9a87-15f64646e986 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.598977] env[61573]: DEBUG nova.compute.provider_tree [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.608295] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.608556] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.608760] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.608959] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.609299] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.609560] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edd0a4c9-a276-4fd2-9a08-3309f36b974c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.614196] env[61573]: DEBUG nova.scheduler.client.report [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.626928] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 569.626928] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cf9a80-35ba-904f-a16d-bdfa157afdd2" [ 569.626928] env[61573]: _type = "Task" [ 569.626928] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.635357] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cf9a80-35ba-904f-a16d-bdfa157afdd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.650298] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.425s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.650860] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 569.715035] env[61573]: DEBUG nova.compute.utils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.716456] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 569.720620] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.749133] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 569.842146] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 569.854442] env[61573]: DEBUG nova.policy [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1df1c001c7eb491e8d1c5dbb3f918646', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5f0b55c024f4143a4e2a141e10c1db3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 569.878956] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.879249] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.879424] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.879660] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.879743] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.879887] env[61573]: DEBUG 
nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.881110] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.881364] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.881558] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.881691] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.881923] env[61573]: DEBUG nova.virt.hardware [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.883151] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e22c80d-5e07-4ace-8512-1d13622dac4e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.896757] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c246fa3e-8b5a-4afb-89f8-ce836d5fbc99 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.143279] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.144082] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.144082] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.363753] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Successfully updated port: 2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 570.380581] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.380695] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.380846] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 570.459014] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance cache missing network info. 
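
The CPU-topology lines just above (flavor/image limits 0:0:0, maximum 65536:65536:65536, "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") amount to enumerating sockets x cores x threads factorizations of the vCPU count and keeping those within the limits; with 1 vCPU and no constraints the only candidate is 1:1:1. An illustrative sketch of that arithmetic only, not the Nova implementation:

    # Illustrative sketch of the topology enumeration logged above: list every
    # (sockets, cores, threads) factorization of the vCPU count that fits the
    # limits. Not Nova's code, just the arithmetic it reports.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
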
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.732588] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Successfully created port: 7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.045302] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Updating instance_info_cache with network_info: [{"id": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "address": "fa:16:3e:70:ff:14", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ecdb20a-d0", "ovs_interfaceid": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.072682] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.072998] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance network_info: |[{"id": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "address": "fa:16:3e:70:ff:14", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": 
"nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ecdb20a-d0", "ovs_interfaceid": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 571.073412] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:ff:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ecdb20a-d0ad-4497-bb51-ea7ca97253d5', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.082148] env[61573]: DEBUG oslo.service.loopingcall [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.082677] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 571.082910] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f94d648-1034-4362-ba17-f1e765b1f5b3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.106963] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.106963] env[61573]: value = "task-4836724" [ 571.106963] env[61573]: _type = "Task" [ 571.106963] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.121181] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836724, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.210026] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.210182] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.227368] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 571.253320] env[61573]: DEBUG nova.compute.manager [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Received event network-vif-plugged-d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 571.253320] env[61573]: DEBUG oslo_concurrency.lockutils [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] Acquiring lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.253669] env[61573]: DEBUG oslo_concurrency.lockutils [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.253841] env[61573]: DEBUG oslo_concurrency.lockutils [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.255104] env[61573]: DEBUG nova.compute.manager [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] No waiting events found dispatching network-vif-plugged-d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 571.255363] env[61573]: WARNING nova.compute.manager [req-c4a0ca01-62a2-473a-a4fb-c3c6f4f1e5d0 req-c607f17f-b337-490c-bc45-be1ec4bd8b0a service nova] [instance: 
9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Received unexpected event network-vif-plugged-d0761f6d-ced5-4b2d-9361-b9ea05c9d031 for instance with vm_state building and task_state spawning. [ 571.304997] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.305488] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.307412] env[61573]: INFO nova.compute.claims [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.600020] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff661dd2-01e2-42fc-8438-d0aedcfbd680 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.613432] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cf502b-b803-4915-8b3a-5dc111229ff3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.623864] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836724, 'name': CreateVM_Task, 'duration_secs': 0.346268} completed successfully. 
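
The CreateVM_Task sequence above (Invoking Folder.CreateVM_Task, "Waiting for the task ... progress is 0%", then completion with a duration_secs value) follows oslo.vmware's invoke-and-wait pattern: the invocation returns a Task managed object immediately and a polling helper watches it to completion. A minimal sketch of that pattern, assuming an already-connected oslo.vmware VMwareAPISession named session and pre-built vm_folder, res_pool and config_spec values (all placeholders, not values taken from this log):

    # Minimal sketch of the CreateVM_Task / wait_for_task pattern in the log:
    # invoke_api() returns a Task moref immediately; wait_for_task() polls it
    # (the "progress is 0%" lines) until it succeeds or raises on error.
    # `session`, `vm_folder`, `res_pool` and `config_spec` are assumed to exist.
    from oslo_vmware import exceptions as vexc

    def create_vm(session, vm_folder, res_pool, config_spec):
        task = session.invoke_api(session.vim, 'CreateVM_Task',
                                  vm_folder, config=config_spec, pool=res_pool)
        try:
            task_info = session.wait_for_task(task)
        except vexc.VimException:
            # The task ended in an error state; re-raise for the caller.
            raise
        # On success the task result is the managed object reference of the new VM.
        return task_info.result
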
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.650017] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 571.654873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.654873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.654873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.654873] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1a331d-b3bd-4f59-a950-ef5e823f91ee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.658929] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-430d0e9c-da77-4b3e-8599-9e600aa19244 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.667381] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 571.667381] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524d3eb5-c67c-57d2-8f3e-57c970c7ddb3" [ 571.667381] env[61573]: _type = "Task" [ 571.667381] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.668785] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a0128f-24a3-4235-9867-197d208804f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.689876] env[61573]: DEBUG nova.compute.provider_tree [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.697044] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524d3eb5-c67c-57d2-8f3e-57c970c7ddb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.709032] env[61573]: DEBUG nova.scheduler.client.report [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.728907] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.423s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.729490] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 571.787415] env[61573]: DEBUG nova.compute.utils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.789432] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Allocating IP information in the background. 
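
The inventory reported above for provider b1eff98b-2b30-4574-a87d-d151235a2dba translates into schedulable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit bounding what a single allocation may consume. Worked out for the values in the log (illustrative arithmetic only, not placement code):

    # Illustrative arithmetic for the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'per-allocation cap:', inv['max_unit'])
    # VCPU      192.0     per-allocation cap: 16
    # MEMORY_MB 196078.0  per-allocation cap: 65530
    # DISK_GB   200.0     per-allocation cap: 96
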
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 571.790043] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 571.802129] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 571.877266] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 571.913026] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.913529] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.913928] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.917181] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.917181] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Image pref 0:0:0 {{(pid=61573) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.917181] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.917181] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.917181] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.917349] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.917349] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.917349] env[61573]: DEBUG nova.virt.hardware [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.917349] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec42cc56-85bb-4d4b-9227-af5b32325f26 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.922135] env[61573]: DEBUG nova.policy [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6a761b5a544c7fa5dab19d44c4a4ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19a0cd3d99c04e37aba52b7de50f98c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 571.930487] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f72d69-d3fd-4ea4-a888-3d37abedaeba {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.154089] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Successfully updated port: 7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.176619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.176619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquired lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.177191] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.189304] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.189615] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.189888] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.297877] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.561544] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.562031] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.817636] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Successfully created port: 90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.917457] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Updating instance_info_cache with network_info: [{"id": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "address": "fa:16:3e:f7:63:ed", "network": {"id": "5277469a-bd98-438f-893b-88130814f5a0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2048241908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f0b55c024f4143a4e2a141e10c1db3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccf56bf-a5", "ovs_interfaceid": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.931613] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Releasing lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.933946] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 
tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance network_info: |[{"id": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "address": "fa:16:3e:f7:63:ed", "network": {"id": "5277469a-bd98-438f-893b-88130814f5a0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2048241908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f0b55c024f4143a4e2a141e10c1db3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccf56bf-a5", "ovs_interfaceid": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 572.934390] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:63:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.946071] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Creating folder: Project (a5f0b55c024f4143a4e2a141e10c1db3). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.946723] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-503a310d-8c1d-47d3-b698-1c95d4e3cea0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.958390] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Created folder: Project (a5f0b55c024f4143a4e2a141e10c1db3) in parent group-v942801. [ 572.958596] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Creating folder: Instances. Parent ref: group-v942824. 
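
The "Instance VIF info" entry above is a straight projection of the Neutron network_info element: the port id becomes iface_id, the MAC address is carried over, and the NSX logical-switch id from the port details becomes an OpaqueNetwork reference for the vmxnet3 NIC. A hypothetical sketch of that mapping, with field names mirroring the log output; this is not the vmwareapi driver code:

    # Hypothetical sketch of the network_info -> VIF info projection shown above.
    def vif_info_from_network_info(entry, vif_model='vmxnet3'):
        details = entry['details']
        return {
            'network_name': entry['network']['bridge'],   # e.g. 'br-int'
            'mac_address': entry['address'],              # e.g. 'fa:16:3e:f7:63:ed'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': entry['id'],                      # the Neutron port UUID
            'vif_model': vif_model,
        }
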
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.958831] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b9cb9da-0a75-44cf-93d4-a5ac8676bb20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.970335] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Created folder: Instances in parent group-v942824. [ 572.970527] env[61573]: DEBUG oslo.service.loopingcall [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.972394] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.972936] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-590f4ac4-aa45-4cea-be0a-2dd3550c4f4a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.001537] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 573.001537] env[61573]: value = "task-4836727" [ 573.001537] env[61573]: _type = "Task" [ 573.001537] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.013948] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836727, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.087661] env[61573]: DEBUG nova.compute.manager [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Received event network-vif-plugged-2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 573.087874] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Acquiring lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.088147] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.088254] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.088447] env[61573]: DEBUG nova.compute.manager [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] No waiting events found dispatching network-vif-plugged-2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.088579] env[61573]: WARNING nova.compute.manager [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Received unexpected event network-vif-plugged-2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 for instance with vm_state building and task_state spawning. [ 573.088738] env[61573]: DEBUG nova.compute.manager [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Received event network-changed-2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 573.088890] env[61573]: DEBUG nova.compute.manager [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Refreshing instance network info cache due to event network-changed-2ecdb20a-d0ad-4497-bb51-ea7ca97253d5. 
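
The "Received event network-vif-plugged-... / No waiting events found dispatching ... / Received unexpected event ..." sequence above is a rendezvous between Neutron notifications and a spawning instance: waiters are registered per instance and event, and a notification that arrives without a registered waiter is reported as unexpected (and may still trigger a cache refresh, as with network-changed here). A highly simplified sketch of such a rendezvous using plain threading primitives, emphatically not Nova's implementation:

    # Highly simplified event rendezvous: waiters register for
    # (instance_uuid, event_name); notifications wake a registered waiter
    # or are reported as unexpected. Not Nova's implementation.
    import threading

    waiters = {}              # (instance_uuid, event_name) -> threading.Event
    waiters_lock = threading.Lock()

    def expect_event(instance_uuid, event_name):
        ev = threading.Event()
        with waiters_lock:
            waiters[(instance_uuid, event_name)] = ev
        return ev

    def deliver_event(instance_uuid, event_name):
        with waiters_lock:
            ev = waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('Received unexpected event %s for instance %s' % (event_name, instance_uuid))
        else:
            ev.set()
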
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 573.091223] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Acquiring lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.091384] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Acquired lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.091556] env[61573]: DEBUG nova.network.neutron [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Refreshing network info cache for port 2ecdb20a-d0ad-4497-bb51-ea7ca97253d5 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.522745] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836727, 'name': CreateVM_Task, 'duration_secs': 0.341916} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.523068] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 573.523694] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.523982] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.524382] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 573.529180] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c509732c-60de-4803-98f1-4d706f80ed17 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.535357] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for the task: (returnval){ [ 573.535357] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d7a0a6-9ce1-e2f8-42fc-619f4e30ae25" [ 573.535357] env[61573]: _type = "Task" [ 573.535357] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.545942] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d7a0a6-9ce1-e2f8-42fc-619f4e30ae25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.844948] env[61573]: DEBUG nova.network.neutron [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Updated VIF entry in instance network info cache for port 2ecdb20a-d0ad-4497-bb51-ea7ca97253d5. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.845366] env[61573]: DEBUG nova.network.neutron [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Updating instance_info_cache with network_info: [{"id": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "address": "fa:16:3e:70:ff:14", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ecdb20a-d0", "ovs_interfaceid": "2ecdb20a-d0ad-4497-bb51-ea7ca97253d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.861321] env[61573]: DEBUG oslo_concurrency.lockutils [req-61a88d14-8ca9-4fef-8f47-5f32308d38a2 req-c6946f84-b836-4a43-9989-333446088d8d service nova] Releasing lock "refresh_cache-a9cca36b-ee0e-42b3-9c26-61c9b0715312" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.971492] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Successfully updated port: 90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 573.987020] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.987218] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquired lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.987376] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.051105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.051105] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.051105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.172660] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.478258] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.478583] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.653802] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Updating instance_info_cache with network_info: [{"id": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "address": "fa:16:3e:85:fe:c1", "network": {"id": "791d655c-6411-4b91-aebe-29636d2af8b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1171670700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19a0cd3d99c04e37aba52b7de50f98c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e4fd72-9c", "ovs_interfaceid": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.677021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Releasing lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.677021] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance network_info: |[{"id": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "address": "fa:16:3e:85:fe:c1", "network": {"id": "791d655c-6411-4b91-aebe-29636d2af8b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1171670700-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19a0cd3d99c04e37aba52b7de50f98c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e4fd72-9c", "ovs_interfaceid": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 574.677306] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:fe:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2c68e7-b690-42e2-9491-c3f9357cc66a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90e4fd72-9c8f-4e11-bba1-5da80f9265f6', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.688813] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Creating folder: Project (19a0cd3d99c04e37aba52b7de50f98c9). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.690204] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f4a52e3-fcce-4765-9a9f-1515d33e342f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.709215] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Created folder: Project (19a0cd3d99c04e37aba52b7de50f98c9) in parent group-v942801. [ 574.709993] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Creating folder: Instances. Parent ref: group-v942827. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.710615] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e1166cb-3331-4a7d-89d9-311615cb920e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.722666] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Created folder: Instances in parent group-v942827. 
[ 574.724020] env[61573]: DEBUG oslo.service.loopingcall [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.724020] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 574.724020] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a7629f8-40f9-447a-9015-2ae05d143140 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.749853] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.749853] env[61573]: value = "task-4836730" [ 574.749853] env[61573]: _type = "Task" [ 574.749853] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.761821] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836730, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.264155] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836730, 'name': CreateVM_Task, 'duration_secs': 0.390371} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.264429] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 575.265638] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.266331] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.266853] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.271316] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2ccc66-a242-4c25-a11a-5a4b0da5574b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.284968] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 
tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for the task: (returnval){ [ 575.284968] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]522ac5fc-d995-e536-7919-ac43b2d2caa7" [ 575.284968] env[61573]: _type = "Task" [ 575.284968] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.295779] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.296055] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 575.296922] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.820047] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Received event network-vif-plugged-b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 575.823399] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquiring lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.823632] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.823800] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.824147] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] No waiting events found dispatching 
network-vif-plugged-b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 575.824343] env[61573]: WARNING nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Received unexpected event network-vif-plugged-b6110c79-8afe-4f2f-868e-74a7351711c0 for instance with vm_state building and task_state spawning. [ 575.824507] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Received event network-changed-d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 575.824658] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Refreshing instance network info cache due to event network-changed-d0761f6d-ced5-4b2d-9361-b9ea05c9d031. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 575.824839] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquiring lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.824969] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquired lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.825131] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Refreshing network info cache for port d0761f6d-ced5-4b2d-9361-b9ea05c9d031 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 577.344271] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.344629] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.403650] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Updated VIF entry in instance network info cache for port d0761f6d-ced5-4b2d-9361-b9ea05c9d031. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 577.404025] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Updating instance_info_cache with network_info: [{"id": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "address": "fa:16:3e:ae:bd:6e", "network": {"id": "b9cf39d8-339f-438c-9a5d-b13d172c2f7d", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1640170036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a9aa4dbe7894ffebca41f816a4eb2da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0761f6d-ce", "ovs_interfaceid": "d0761f6d-ced5-4b2d-9361-b9ea05c9d031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.418864] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Releasing lock "refresh_cache-9cb4ec1b-e422-491d-8ee9-0103c740ea5b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.421121] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Received event network-changed-b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 577.421400] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Refreshing instance network info cache due to event network-changed-b6110c79-8afe-4f2f-868e-74a7351711c0. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 577.421573] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquiring lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.421723] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquired lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.422068] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Refreshing network info cache for port b6110c79-8afe-4f2f-868e-74a7351711c0 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 577.520472] env[61573]: DEBUG nova.compute.manager [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Received event network-vif-plugged-90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 577.520703] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Acquiring lock "4c70f154-7d65-4cea-ad90-8626f58b70f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.520905] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.521454] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.521671] env[61573]: DEBUG nova.compute.manager [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] No waiting events found dispatching network-vif-plugged-90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 577.521855] env[61573]: WARNING nova.compute.manager [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Received unexpected event network-vif-plugged-90e4fd72-9c8f-4e11-bba1-5da80f9265f6 for instance with vm_state building and task_state spawning. 
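
The event entries around this point show Neutron's network-vif-plugged / network-changed notifications arriving at the compute manager, which briefly takes a per-instance "<uuid>-events" lock while looking for a registered waiter and logs the "Received unexpected event" warning when none exists. A simplified, illustrative reduction of that bookkeeping (not Nova's InstanceEvents implementation; the structures and names below are made up) could look like this:

# Per-instance event bookkeeping sketch: a short-lived oslo.concurrency lock
# named "<instance-uuid>-events" protects the waiter table while an incoming
# external event is matched against it.
from collections import defaultdict

from oslo_concurrency import lockutils

_waiters = defaultdict(dict)  # instance_uuid -> {event_name: callback}


def pop_instance_event(instance_uuid, event_name):
    """Return the registered waiter for an event, or None if nobody waits."""
    with lockutils.lock(f'{instance_uuid}-events'):
        return _waiters[instance_uuid].pop(event_name, None)


def handle_external_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the "Received unexpected event ..." WARNING above: the event
        # arrived before anything registered interest in it (instance still
        # building/spawning), so it is simply noted and dropped.
        print(f'unexpected event {event_name} for {instance_uuid}')
    else:
        waiter()
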
[ 577.522097] env[61573]: DEBUG nova.compute.manager [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Received event network-changed-90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 577.522188] env[61573]: DEBUG nova.compute.manager [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Refreshing instance network info cache due to event network-changed-90e4fd72-9c8f-4e11-bba1-5da80f9265f6. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 577.522377] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Acquiring lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.522511] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Acquired lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.522748] env[61573]: DEBUG nova.network.neutron [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Refreshing network info cache for port 90e4fd72-9c8f-4e11-bba1-5da80f9265f6 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 578.488730] env[61573]: DEBUG nova.network.neutron [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Updated VIF entry in instance network info cache for port 90e4fd72-9c8f-4e11-bba1-5da80f9265f6. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 578.492138] env[61573]: DEBUG nova.network.neutron [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Updating instance_info_cache with network_info: [{"id": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "address": "fa:16:3e:85:fe:c1", "network": {"id": "791d655c-6411-4b91-aebe-29636d2af8b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1171670700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19a0cd3d99c04e37aba52b7de50f98c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e4fd72-9c", "ovs_interfaceid": "90e4fd72-9c8f-4e11-bba1-5da80f9265f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.503427] env[61573]: DEBUG oslo_concurrency.lockutils [req-497f44d1-6398-4261-b344-01cfb69954ea req-069f36be-91af-45e8-8f64-c8b9f16b12cd service nova] Releasing lock "refresh_cache-4c70f154-7d65-4cea-ad90-8626f58b70f5" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.573110] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Updated VIF entry in instance network info cache for port b6110c79-8afe-4f2f-868e-74a7351711c0. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 578.573745] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Updating instance_info_cache with network_info: [{"id": "b6110c79-8afe-4f2f-868e-74a7351711c0", "address": "fa:16:3e:ef:93:4b", "network": {"id": "71b5d912-bd14-4a16-ab18-24dffad95369", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-6868500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "308f1e086ba943b9b9cf27a1da4eda0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6110c79-8a", "ovs_interfaceid": "b6110c79-8afe-4f2f-868e-74a7351711c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.595327] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Releasing lock "refresh_cache-35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.595608] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Received event network-vif-plugged-7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 578.595799] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquiring lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.596011] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.596180] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.596393] env[61573]: DEBUG 
nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] No waiting events found dispatching network-vif-plugged-7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 578.596503] env[61573]: WARNING nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Received unexpected event network-vif-plugged-7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 for instance with vm_state building and task_state spawning. [ 578.596667] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Received event network-changed-7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 578.596824] env[61573]: DEBUG nova.compute.manager [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Refreshing instance network info cache due to event network-changed-7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 578.597018] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquiring lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.597230] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Acquired lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.597398] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Refreshing network info cache for port 7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 579.540350] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Updated VIF entry in instance network info cache for port 7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 579.541341] env[61573]: DEBUG nova.network.neutron [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Updating instance_info_cache with network_info: [{"id": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "address": "fa:16:3e:f7:63:ed", "network": {"id": "5277469a-bd98-438f-893b-88130814f5a0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2048241908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f0b55c024f4143a4e2a141e10c1db3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccf56bf-a5", "ovs_interfaceid": "7ccf56bf-a5e8-4603-9a99-5dfc42b4fcb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.559845] env[61573]: DEBUG oslo_concurrency.lockutils [req-640d4c62-3031-4169-b72e-de069156ea89 req-140a3ff2-c7cf-404b-9785-ecb5708ec358 service nova] Releasing lock "refresh_cache-c445065e-68e5-4dda-ba5d-314dc2da12cf" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.270153] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "8c57d493-12c4-47fe-a355-c9ade98b7158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.270153] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.910296] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d74865c9-332d-41a8-ae31-2b7e7832351e tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "549dbec8-1008-4930-8e2d-97cd11cfa45f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.910645] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d74865c9-332d-41a8-ae31-2b7e7832351e tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] 
Lock "549dbec8-1008-4930-8e2d-97cd11cfa45f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.037362] env[61573]: DEBUG oslo_concurrency.lockutils [None req-19346093-52ce-472e-8389-2aa8ed3fd1b7 tempest-VolumesAssistedSnapshotsTest-685401197 tempest-VolumesAssistedSnapshotsTest-685401197-project-member] Acquiring lock "fe988112-9ee6-4fa7-8914-87119aaa2549" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.037719] env[61573]: DEBUG oslo_concurrency.lockutils [None req-19346093-52ce-472e-8389-2aa8ed3fd1b7 tempest-VolumesAssistedSnapshotsTest-685401197 tempest-VolumesAssistedSnapshotsTest-685401197-project-member] Lock "fe988112-9ee6-4fa7-8914-87119aaa2549" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.141021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c4e48610-55b2-48e1-925f-0c4704229484 tempest-ServersWithSpecificFlavorTestJSON-785244569 tempest-ServersWithSpecificFlavorTestJSON-785244569-project-member] Acquiring lock "91def60e-ad74-435f-a006-eb5672928010" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.141021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c4e48610-55b2-48e1-925f-0c4704229484 tempest-ServersWithSpecificFlavorTestJSON-785244569 tempest-ServersWithSpecificFlavorTestJSON-785244569-project-member] Lock "91def60e-ad74-435f-a006-eb5672928010" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.705236] env[61573]: DEBUG oslo_concurrency.lockutils [None req-792c39a7-9da0-4132-af21-92e184faab4f tempest-ServerAddressesNegativeTestJSON-2109027869 tempest-ServerAddressesNegativeTestJSON-2109027869-project-member] Acquiring lock "643dbc96-7e32-4442-ab44-cbd74b0ffd72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.705660] env[61573]: DEBUG oslo_concurrency.lockutils [None req-792c39a7-9da0-4132-af21-92e184faab4f tempest-ServerAddressesNegativeTestJSON-2109027869 tempest-ServerAddressesNegativeTestJSON-2109027869-project-member] Lock "643dbc96-7e32-4442-ab44-cbd74b0ffd72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.489873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24abebe1-1b99-4d94-bf50-5e3952100ef5 tempest-ServersAdmin275Test-1369214867 tempest-ServersAdmin275Test-1369214867-project-member] Acquiring lock "f42d10b1-6733-4c39-9230-fd41dd82bb20" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.490215] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24abebe1-1b99-4d94-bf50-5e3952100ef5 tempest-ServersAdmin275Test-1369214867 tempest-ServersAdmin275Test-1369214867-project-member] Lock "f42d10b1-6733-4c39-9230-fd41dd82bb20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.040186] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fbeb75b7-ced4-4e0c-9bd4-a20ce96a050d tempest-ServerDiagnosticsV248Test-403520221 tempest-ServerDiagnosticsV248Test-403520221-project-member] Acquiring lock "fbd512f6-5c22-4a40-846c-0fece5ee7bcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.040186] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fbeb75b7-ced4-4e0c-9bd4-a20ce96a050d tempest-ServerDiagnosticsV248Test-403520221 tempest-ServerDiagnosticsV248Test-403520221-project-member] Lock "fbd512f6-5c22-4a40-846c-0fece5ee7bcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.062294] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e9c79b5-0651-4ca7-a7fe-02f52d99afe6 tempest-InstanceActionsNegativeTestJSON-465330128 tempest-InstanceActionsNegativeTestJSON-465330128-project-member] Acquiring lock "37256e1b-8191-4bee-a3c3-dd31c99d9301" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.062633] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e9c79b5-0651-4ca7-a7fe-02f52d99afe6 tempest-InstanceActionsNegativeTestJSON-465330128 tempest-InstanceActionsNegativeTestJSON-465330128-project-member] Lock "37256e1b-8191-4bee-a3c3-dd31c99d9301" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.081639] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244ff0f2-ebc7-430a-8f38-7f56f8e2fb66 tempest-ServersTestFqdnHostnames-50427058 tempest-ServersTestFqdnHostnames-50427058-project-member] Acquiring lock "3d0bfbba-2fec-4063-91f3-3152b3ea537e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.082725] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244ff0f2-ebc7-430a-8f38-7f56f8e2fb66 tempest-ServersTestFqdnHostnames-50427058 tempest-ServersTestFqdnHostnames-50427058-project-member] Lock "3d0bfbba-2fec-4063-91f3-3152b3ea537e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 591.894880] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8c903a3-f440-48fb-8c3c-85e5459c1e36 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "a8b2d4b2-731f-489a-8b11-c816e6827189" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.898783] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8c903a3-f440-48fb-8c3c-85e5459c1e36 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "a8b2d4b2-731f-489a-8b11-c816e6827189" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.511070] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9951a5bd-3235-4023-8b41-796e782a1017 tempest-ServerMetadataTestJSON-759562833 tempest-ServerMetadataTestJSON-759562833-project-member] Acquiring lock "65a29af2-4c3b-4556-ae95-adab1fa7aad4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.511463] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9951a5bd-3235-4023-8b41-796e782a1017 tempest-ServerMetadataTestJSON-759562833 tempest-ServerMetadataTestJSON-759562833-project-member] Lock "65a29af2-4c3b-4556-ae95-adab1fa7aad4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.349213] env[61573]: WARNING oslo_vmware.rw_handles [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 602.349213] env[61573]: ERROR oslo_vmware.rw_handles [ 602.349914] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 
tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 602.351980] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 602.352272] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Copying Virtual Disk [datastore2] vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/3f6d1aa8-1e66-4c34-8bad-f1e59c5c416d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 602.352579] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e040bb5d-9309-484c-96f3-2aa774781d91 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.362908] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 602.362908] env[61573]: value = "task-4836731" [ 602.362908] env[61573]: _type = "Task" [ 602.362908] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.376169] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': task-4836731, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.877694] env[61573]: DEBUG oslo_vmware.exceptions [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 602.877989] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.881352] env[61573]: ERROR nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 602.881352] env[61573]: Faults: ['InvalidArgument'] [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Traceback (most recent call last): [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] yield resources [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self.driver.spawn(context, instance, image_meta, [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self._fetch_image_if_missing(context, vi) [ 602.881352] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] image_cache(vi, tmp_image_ds_loc) [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] vm_util.copy_virtual_disk( [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] session._wait_for_task(vmdk_copy_task) [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return self.wait_for_task(task_ref) [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return evt.wait() [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] result = hub.switch() [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.881888] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return self.greenlet.switch() [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self.f(*self.args, **self.kw) [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] raise exceptions.translate_fault(task_info.error) [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Faults: ['InvalidArgument'] [ 602.882322] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] [ 602.882322] env[61573]: INFO nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Terminating instance [ 602.883914] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.883914] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.884337] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 
tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 602.884515] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 602.884768] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ddb6b6b-7dec-4525-bbfa-e91d997ca676 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.887670] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97d1be9-b502-4768-8f8c-e7f91f1deff8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.901045] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.901257] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 602.902087] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 602.902326] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d56894e-31f5-41c0-b86a-8f8d8bca103d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.904749] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8049a3a7-cf3d-45ff-87e0-3f07ae425154 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.909840] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Waiting for the task: (returnval){ [ 602.909840] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a19521-44b1-9e28-8cf0-f702678baf4e" [ 602.909840] env[61573]: _type = "Task" [ 602.909840] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.918310] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a19521-44b1-9e28-8cf0-f702678baf4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.980829] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 602.981535] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 602.981865] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleting the datastore file [datastore2] 1da85728-bbee-4605-bf03-d49a650c4d1e {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 602.982212] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcfc6591-3729-4cb7-a3b5-43cf989bf826 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.993258] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 602.993258] env[61573]: value = "task-4836733" [ 602.993258] env[61573]: _type = "Task" [ 602.993258] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.003121] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': task-4836733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.407036] env[61573]: DEBUG oslo_concurrency.lockutils [None req-98aa3695-9340-4318-b5fc-4eef6a9cc7e8 tempest-ServersNegativeTestJSON-472036444 tempest-ServersNegativeTestJSON-472036444-project-member] Acquiring lock "0d55fe3b-3a7e-493d-b705-10241a2ac392" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.407036] env[61573]: DEBUG oslo_concurrency.lockutils [None req-98aa3695-9340-4318-b5fc-4eef6a9cc7e8 tempest-ServersNegativeTestJSON-472036444 tempest-ServersNegativeTestJSON-472036444-project-member] Lock "0d55fe3b-3a7e-493d-b705-10241a2ac392" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.422547] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 603.423823] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Creating directory with path [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.423823] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6972123c-1bdd-4f7b-bdf5-5bd52adabe33 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.439543] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Created directory with path [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.440067] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Fetch image to [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 603.440067] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 603.441061] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478025d1-6287-41c2-a1b6-6c17f99582d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.449271] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef40031-67db-4929-9fd8-a70d00ee40d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.461903] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4344e2fb-73f9-4b13-a63f-5ccc3fe75009 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.506787] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a17ad0-17b3-4b51-bb87-f97b8641d447 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.517621] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-826e1b44-2de2-43d5-bf35-7b0d7551623a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.519870] env[61573]: DEBUG oslo_vmware.api [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': task-4836733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086739} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.520148] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 603.520353] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 603.520591] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 603.521023] env[61573]: INFO nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Took 0.64 seconds to destroy the instance on the hypervisor. 
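The traceback and task entries above all follow one pattern: the driver submits a vCenter task (CopyVirtualDisk, DeleteDatastoreFile, SearchDatastore), then wait_for_task polls it until it reaches a terminal state, logging "progress is N%" along the way and, if the task ends in error, raising a translated fault (here VimFaultException: "A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). Below is a minimal, self-contained sketch of that poll-and-raise loop; TaskInfo, fetch_task_info and TaskFailed are illustrative stand-ins, not the oslo.vmware API.

    # Minimal sketch of the poll-until-done pattern visible in the traceback above.
    # Not the oslo.vmware implementation: TaskInfo / fetch_task_info / TaskFailed
    # stand in for the real PropertyCollector calls and exceptions.translate_fault().
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    class TaskFailed(Exception):
        """Raised when the backend reports the task in an error state."""

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reaches a terminal state, mirroring the
        'progress is N%' / 'completed successfully' lines in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # This is the branch the CopyVirtualDisk task took above,
                # surfacing InvalidArgument for the 'fileType' parameter.
                raise TaskFailed(info.error)
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete in time')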
[ 603.523916] env[61573]: DEBUG nova.compute.claims [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 603.524215] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.524359] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.613417] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 603.697746] env[61573]: DEBUG oslo_vmware.rw_handles [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 603.764356] env[61573]: DEBUG oslo_vmware.rw_handles [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 603.765815] env[61573]: DEBUG oslo_vmware.rw_handles [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 604.100980] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910aa95d-efe2-4678-a5b2-e0870329e4df {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.113265] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0a85de-3051-4ba0-a76e-20f22591e065 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.160143] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91269ec9-a4e6-43e3-8047-ddb4e3a4de39 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.168207] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9ad322-0b22-4e3b-9726-6db7022108b7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.183485] env[61573]: DEBUG nova.compute.provider_tree [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.199600] env[61573]: DEBUG nova.scheduler.client.report [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.213845] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.689s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.214427] env[61573]: ERROR nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 604.214427] env[61573]: Faults: ['InvalidArgument'] [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Traceback (most recent call last): [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 604.214427] 
env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self.driver.spawn(context, instance, image_meta, [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self._fetch_image_if_missing(context, vi) [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] image_cache(vi, tmp_image_ds_loc) [ 604.214427] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] vm_util.copy_virtual_disk( [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] session._wait_for_task(vmdk_copy_task) [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return self.wait_for_task(task_ref) [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return evt.wait() [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] result = hub.switch() [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] return self.greenlet.switch() [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 604.215164] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] self.f(*self.args, **self.kw) [ 604.215809] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 604.215809] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] raise exceptions.translate_fault(task_info.error) [ 604.215809] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 604.215809] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Faults: ['InvalidArgument'] [ 604.215809] env[61573]: ERROR nova.compute.manager [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] [ 604.215809] env[61573]: DEBUG nova.compute.utils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 604.220298] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Build of instance 1da85728-bbee-4605-bf03-d49a650c4d1e was re-scheduled: A specified parameter was not correct: fileType [ 604.220298] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 604.220802] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 604.221030] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 604.221185] env[61573]: DEBUG nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 604.221350] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 605.163362] env[61573]: DEBUG nova.network.neutron [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.184556] env[61573]: INFO nova.compute.manager [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 1da85728-bbee-4605-bf03-d49a650c4d1e] Took 0.96 seconds to deallocate network for instance. [ 605.376641] env[61573]: INFO nova.scheduler.client.report [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleted allocations for instance 1da85728-bbee-4605-bf03-d49a650c4d1e [ 605.413424] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3bea61c-4742-4315-8b63-88f527f65760 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "1da85728-bbee-4605-bf03-d49a650c4d1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.965s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.455073] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 605.548582] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.548835] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.550378] env[61573]: INFO nova.compute.claims [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.191046] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3a6b0a-5540-4855-81c6-cf744410aa9b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.198385] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4019d40a-ac98-4562-8dc0-b4cbc2b7baa5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.233202] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bb6225-e952-4bc7-85e3-13bcced384c7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.242109] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84cca39-a22a-4917-a4cf-6d203713c578 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.256772] env[61573]: DEBUG nova.compute.provider_tree [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.271146] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 606.295021] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.744s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.295021] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 606.362158] env[61573]: DEBUG nova.compute.utils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.366188] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 606.366188] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 606.409960] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 606.494848] env[61573]: DEBUG nova.policy [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3eb02fff741e404b85065a8323568219', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bb57dd7a5d14a62bc23a9747d5d4ffe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 606.594301] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 606.629370] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 606.629970] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 606.629970] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.629970] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 606.630156] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.630751] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 606.630751] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 606.630751] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 606.630914] env[61573]: DEBUG 
nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 606.631034] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 606.631233] env[61573]: DEBUG nova.virt.hardware [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.632835] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af03b973-8916-4747-a623-c6008da38d9b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.642367] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e6e214-b7fd-460b-9e6a-accd9f392fdf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.620632] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Successfully created port: 01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.306719] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Successfully updated port: 01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.325050] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.325319] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquired lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.325598] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.392128] env[61573]: DEBUG nova.network.neutron [None 
req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.878217] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Updating instance_info_cache with network_info: [{"id": "01b08e0b-db87-4721-91ce-d6d86f13ec15", "address": "fa:16:3e:d1:9d:ec", "network": {"id": "38cec38a-b180-4678-88cc-26d28a3018c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2105355313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bb57dd7a5d14a62bc23a9747d5d4ffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b08e0b-db", "ovs_interfaceid": "01b08e0b-db87-4721-91ce-d6d86f13ec15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.902592] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Releasing lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.902978] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance network_info: |[{"id": "01b08e0b-db87-4721-91ce-d6d86f13ec15", "address": "fa:16:3e:d1:9d:ec", "network": {"id": "38cec38a-b180-4678-88cc-26d28a3018c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2105355313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bb57dd7a5d14a62bc23a9747d5d4ffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b08e0b-db", "ovs_interfaceid": 
"01b08e0b-db87-4721-91ce-d6d86f13ec15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 609.903384] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:9d:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01b08e0b-db87-4721-91ce-d6d86f13ec15', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.913740] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Creating folder: Project (2bb57dd7a5d14a62bc23a9747d5d4ffe). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 609.914038] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e7b8972-1943-419f-8f80-4343150acaed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.926808] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Created folder: Project (2bb57dd7a5d14a62bc23a9747d5d4ffe) in parent group-v942801. [ 609.926933] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Creating folder: Instances. Parent ref: group-v942830. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 609.927228] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68aeee54-efc8-45f5-a766-a82990347795 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.944744] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Created folder: Instances in parent group-v942830. [ 609.945064] env[61573]: DEBUG oslo.service.loopingcall [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.945284] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 609.945512] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdd1180b-aeb2-41d8-bbd5-e33783b3b0ea {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.003510] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.003510] env[61573]: value = "task-4836736" [ 610.003510] env[61573]: _type = "Task" [ 610.003510] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.018401] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836736, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.525668] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836736, 'name': CreateVM_Task, 'duration_secs': 0.365181} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.525668] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 610.525668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.525668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.525668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 610.526468] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-914ba49f-4c17-4753-a7df-822a1554ba54 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.526468] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for the task: (returnval){ [ 610.526468] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52717f4d-5114-fb12-b6c6-62160bc1ab17" [ 610.526468] env[61573]: _type = "Task" [ 610.526468] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.536013] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52717f4d-5114-fb12-b6c6-62160bc1ab17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.542788] env[61573]: DEBUG nova.compute.manager [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Received event network-vif-plugged-01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 610.543126] env[61573]: DEBUG oslo_concurrency.lockutils [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] Acquiring lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.543506] env[61573]: DEBUG oslo_concurrency.lockutils [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.543824] env[61573]: DEBUG oslo_concurrency.lockutils [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.544362] env[61573]: DEBUG nova.compute.manager [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] No waiting events found dispatching network-vif-plugged-01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.544487] env[61573]: WARNING nova.compute.manager [req-56148ba5-7f45-4ebf-ba76-8fea02fd68e1 req-4ad1b4e6-1337-49b4-a46f-3b9c845cc37e service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Received unexpected event network-vif-plugged-01b08e0b-db87-4721-91ce-d6d86f13ec15 for instance with vm_state building and task_state spawning. 
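Most of the concurrency bookkeeping in this log comes from the oslo.concurrency lockutils wrapper: each named lock records how long the caller waited to acquire it and how long it was held before release (for example, "compute_resources" above was held 0.744s by ResourceTracker.instance_claim). The snippet below is a simplified stand-in for that accounting built on threading.Lock; the timed_lock name and the module-level lock registry are assumptions for illustration, not the lockutils implementation.

    # Simplified stand-in for the "acquired :: waited Ns" / "released :: held Ns"
    # accounting seen throughout this log. Not oslo.concurrency.lockutils.
    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def timed_lock(name: str, owner: str):
        lock = _locks.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, owner, acquired - start)
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, owner, time.monotonic() - acquired)

Used as `with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...`, it emits wait/held lines analogous to the lockutils entries above.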
[ 610.873713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.873971] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.034029] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.034236] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.034450] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.649479] env[61573]: DEBUG nova.compute.manager [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Received event network-changed-01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 613.649740] env[61573]: DEBUG nova.compute.manager [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Refreshing instance network info cache due to event network-changed-01b08e0b-db87-4721-91ce-d6d86f13ec15. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 613.649896] env[61573]: DEBUG oslo_concurrency.lockutils [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] Acquiring lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.650046] env[61573]: DEBUG oslo_concurrency.lockutils [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] Acquired lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.650278] env[61573]: DEBUG nova.network.neutron [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Refreshing network info cache for port 01b08e0b-db87-4721-91ce-d6d86f13ec15 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 613.683418] env[61573]: DEBUG oslo_concurrency.lockutils [None req-76893cfa-de01-4f69-8c6c-bbf996e6f4bc tempest-ServerActionsTestOtherA-1332715427 tempest-ServerActionsTestOtherA-1332715427-project-member] Acquiring lock "79b5dc73-2168-46b5-8045-13e92666a69a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.683418] env[61573]: DEBUG oslo_concurrency.lockutils [None req-76893cfa-de01-4f69-8c6c-bbf996e6f4bc tempest-ServerActionsTestOtherA-1332715427 tempest-ServerActionsTestOtherA-1332715427-project-member] Lock "79b5dc73-2168-46b5-8045-13e92666a69a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.086776] env[61573]: DEBUG nova.network.neutron [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Updated VIF entry in instance network info cache for port 01b08e0b-db87-4721-91ce-d6d86f13ec15. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 614.087341] env[61573]: DEBUG nova.network.neutron [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Updating instance_info_cache with network_info: [{"id": "01b08e0b-db87-4721-91ce-d6d86f13ec15", "address": "fa:16:3e:d1:9d:ec", "network": {"id": "38cec38a-b180-4678-88cc-26d28a3018c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2105355313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bb57dd7a5d14a62bc23a9747d5d4ffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b08e0b-db", "ovs_interfaceid": "01b08e0b-db87-4721-91ce-d6d86f13ec15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.098859] env[61573]: DEBUG oslo_concurrency.lockutils [req-221550a1-31b3-47c2-844a-adb0ea6bb352 req-b856483c-038f-49f6-b028-550dbb6b73f9 service nova] Releasing lock "refresh_cache-f63a7f37-09ac-4fe8-a1a3-7e13eb158526" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.784645] env[61573]: DEBUG oslo_concurrency.lockutils [None req-43525257-a454-418c-8355-f17dade7cf69 tempest-ServerExternalEventsTest-1001139505 tempest-ServerExternalEventsTest-1001139505-project-member] Acquiring lock "413e21a0-5b33-44d1-9964-82e3da294808" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.785073] env[61573]: DEBUG oslo_concurrency.lockutils [None req-43525257-a454-418c-8355-f17dade7cf69 tempest-ServerExternalEventsTest-1001139505 tempest-ServerExternalEventsTest-1001139505-project-member] Lock "413e21a0-5b33-44d1-9964-82e3da294808" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.002598] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.044748] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.044928] env[61573]: DEBUG 
oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.045115] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.046028] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.048730] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 616.410234] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.410234] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.410234] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.627927] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Acquiring lock "02a2cbdf-c522-4ba5-9914-ccf1c51e0130" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.628165] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "02a2cbdf-c522-4ba5-9914-ccf1c51e0130" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.657233] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Acquiring lock "1e843567-f13d-40fa-94a0-931655307053" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.657339] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 
tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "1e843567-f13d-40fa-94a0-931655307053" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.686782] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Acquiring lock "97a1e2dc-9c4c-419d-87aa-839f21dc6e23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.687350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "97a1e2dc-9c4c-419d-87aa-839f21dc6e23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.403502] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.403827] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 617.403827] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 617.440056] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440176] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440493] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440493] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440636] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440636] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440746] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.440878] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.441057] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.441119] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 617.441253] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 617.441795] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.463887] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.464132] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.464297] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.464458] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 617.465587] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3fb418-d697-48bb-ab60-3ab49b2a61e6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.478321] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ac1c29-1352-4778-9612-52f1442690d8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.497877] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bf5a83-b49a-4399-83ac-99622cde55e9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.508213] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a23a5f2-4d9d-4555-9ea4-8ac7457b02d9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.544026] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 617.544026] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.544026] 
env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.636345] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.636529] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8725d6e7-57cc-4d55-b21a-4aee65d5228b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.636590] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 933ffe74-6883-4676-9be8-f12e45be35e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.636695] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance da95334f-ebbf-4a7f-8492-ca310028c4dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.636917] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.637086] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.637147] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.637338] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.637338] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.637459] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 617.667777] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.696474] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.709395] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.721183] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 549dbec8-1008-4930-8e2d-97cd11cfa45f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.734029] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fe988112-9ee6-4fa7-8914-87119aaa2549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.753624] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 91def60e-ad74-435f-a006-eb5672928010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.766454] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 643dbc96-7e32-4442-ab44-cbd74b0ffd72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.780673] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f42d10b1-6733-4c39-9230-fd41dd82bb20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.795026] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fbd512f6-5c22-4a40-846c-0fece5ee7bcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.820330] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 37256e1b-8191-4bee-a3c3-dd31c99d9301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.832426] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d0bfbba-2fec-4063-91f3-3152b3ea537e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.844029] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a8b2d4b2-731f-489a-8b11-c816e6827189 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.855907] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 65a29af2-4c3b-4556-ae95-adab1fa7aad4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.868845] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d55fe3b-3a7e-493d-b705-10241a2ac392 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.887915] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.903409] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79b5dc73-2168-46b5-8045-13e92666a69a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.914984] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 413e21a0-5b33-44d1-9964-82e3da294808 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.927973] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 02a2cbdf-c522-4ba5-9914-ccf1c51e0130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.940731] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1e843567-f13d-40fa-94a0-931655307053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.981560] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 97a1e2dc-9c4c-419d-87aa-839f21dc6e23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 617.981829] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 617.982019] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_fde71aa69dbe431bb75848f76999b7d2': '1', 'io_workload': '10', 'num_proj_03ab001ffb3148e7bdb7b1d4dfa3ea88': '1', 'num_proj_3aabf5c6a1ed4a799a8d230cb0e37107': '1', 'num_proj_ad80964934624fed809a3285c0f1b748': '1', 'num_proj_5a9aa4dbe7894ffebca41f816a4eb2da': '1', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '2', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 618.416998] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42616186-3ab6-4aba-8ff8-8019cfa4d5a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.427237] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c7ad62-7780-4f70-95b0-d1d9c7286b33 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.462833] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46249c74-8fb7-4336-95ce-05853b58c9e8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.471218] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf86385-6276-47e9-a566-ad92223993d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.485534] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.495237] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.512192] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 618.512192] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.968s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.375268] env[61573]: DEBUG oslo_concurrency.lockutils [None req-94a577dd-a200-4671-9ce6-92aa0a85fb65 tempest-ServersV294TestFqdnHostnames-1409359785 tempest-ServersV294TestFqdnHostnames-1409359785-project-member] Acquiring lock "12a02fd4-ae5d-4e7f-96e1-27d50d54bc24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.375544] env[61573]: DEBUG oslo_concurrency.lockutils [None req-94a577dd-a200-4671-9ce6-92aa0a85fb65 tempest-ServersV294TestFqdnHostnames-1409359785 tempest-ServersV294TestFqdnHostnames-1409359785-project-member] Lock "12a02fd4-ae5d-4e7f-96e1-27d50d54bc24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.713623] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9e9c040e-cd91-4689-b976-22089c6b3210 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Acquiring lock "3fb9b32d-193f-4968-8783-54d25253f94f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.713979] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9e9c040e-cd91-4689-b976-22089c6b3210 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Lock "3fb9b32d-193f-4968-8783-54d25253f94f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.340117] env[61573]: DEBUG oslo_concurrency.lockutils [None req-310ecc1e-fdf1-44b0-9ab2-c1e6da2f250b tempest-ServerActionsV293TestJSON-1012733847 tempest-ServerActionsV293TestJSON-1012733847-project-member] Acquiring lock "12cd88c0-09c1-42de-8fdd-03139718ca17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.340451] env[61573]: DEBUG oslo_concurrency.lockutils [None req-310ecc1e-fdf1-44b0-9ab2-c1e6da2f250b 
tempest-ServerActionsV293TestJSON-1012733847 tempest-ServerActionsV293TestJSON-1012733847-project-member] Lock "12cd88c0-09c1-42de-8fdd-03139718ca17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.118692] env[61573]: WARNING oslo_vmware.rw_handles [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 653.118692] env[61573]: ERROR oslo_vmware.rw_handles [ 653.119280] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 653.120809] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 653.121054] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Copying Virtual Disk [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/218b8025-6762-4bbc-9e1f-92a296172b67/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 653.121359] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-b9caabc9-eed4-4b67-9377-b42da30b2810 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.131353] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Waiting for the task: (returnval){ [ 653.131353] env[61573]: value = "task-4836748" [ 653.131353] env[61573]: _type = "Task" [ 653.131353] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.141684] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Task: {'id': task-4836748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.642192] env[61573]: DEBUG oslo_vmware.exceptions [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 653.642498] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.643165] env[61573]: ERROR nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 653.643165] env[61573]: Faults: ['InvalidArgument'] [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Traceback (most recent call last): [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] yield resources [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self.driver.spawn(context, instance, image_meta, [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in 
spawn [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self._fetch_image_if_missing(context, vi) [ 653.643165] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] image_cache(vi, tmp_image_ds_loc) [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] vm_util.copy_virtual_disk( [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] session._wait_for_task(vmdk_copy_task) [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return self.wait_for_task(task_ref) [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return evt.wait() [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] result = hub.switch() [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 653.643471] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return self.greenlet.switch() [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self.f(*self.args, **self.kw) [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] raise exceptions.translate_fault(task_info.error) [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Faults: ['InvalidArgument'] [ 653.643773] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] [ 
653.643773] env[61573]: INFO nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Terminating instance [ 653.644975] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.645198] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.645591] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69d93238-34eb-43f6-826f-0bcd206c51d7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.647929] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 653.648139] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 653.648992] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ff6c38-5476-4295-bb4d-411173c47741 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.657423] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 653.657804] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c5e7e6f-5eb3-4602-9fbf-1f2be8c04088 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.661621] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.661621] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 
tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 653.662372] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246fca64-18e9-4447-9990-ead938b5f833 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.668596] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Waiting for the task: (returnval){ [ 653.668596] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52210856-db61-c6b0-326c-82a478486a2c" [ 653.668596] env[61573]: _type = "Task" [ 653.668596] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.677111] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52210856-db61-c6b0-326c-82a478486a2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.741877] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 653.741877] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 653.741877] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Deleting the datastore file [datastore2] 8725d6e7-57cc-4d55-b21a-4aee65d5228b {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 653.742395] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14945e0d-173d-47f4-9780-3cff460ce91f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.750274] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Waiting for the task: (returnval){ [ 653.750274] env[61573]: value = "task-4836750" [ 653.750274] env[61573]: _type = "Task" [ 653.750274] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.759429] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Task: {'id': task-4836750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.181013] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 654.181013] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Creating directory with path [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.181013] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1cc5409-1729-4716-8675-be931dad7666 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.194119] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Created directory with path [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.194346] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Fetch image to [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 654.194505] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 654.195310] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b40990d-e111-4ccb-891d-7243fb7f8fc1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.202872] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b9a90c-3cc0-4e01-b6b0-8519ecd1586b {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.212902] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f67f9f-87e2-4853-92de-13ff2cf12547 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.243966] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e77356-d918-45f3-87b0-4d40d99e632c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.250581] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5b95bae0-709b-4520-9901-eb59d1aaa968 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.262282] env[61573]: DEBUG oslo_vmware.api [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Task: {'id': task-4836750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071454} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.262542] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.262729] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 654.262937] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 654.263132] env[61573]: INFO nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Took 0.62 seconds to destroy the instance on the hypervisor. 
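Editor's note: the repeated "Acquiring lock … / Lock … acquired by … / Lock … released by …" DEBUG lines throughout this log (for example around the "compute_resources" lock) are emitted by oslo.concurrency's lock wrapper, as the log's own `inner /…/oslo_concurrency/lockutils.py` trailers indicate. The snippet below is a minimal, hypothetical sketch of how such messages arise from a decorated method; the class name, method body, and file layout are illustrative assumptions and are not Nova's actual resource tracker code.

```python
# Illustrative sketch only (not Nova source): a method guarded by
# oslo.concurrency's synchronized decorator. While it runs, any other
# caller using the same lock name blocks at the "Acquiring lock" DEBUG
# line until the corresponding "released" line is logged.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # lock name seen in the log


class ResourceTrackerSketch:  # hypothetical class for illustration
    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def abort_instance_claim(self, context, instance, nodename):
        # Placeholder body: in the log above, holding this lock serializes
        # claim aborts against _update_available_resource on the same host.
        pass
```

Under this reading, the "waited 0.000s" / "held 0.968s" figures in the surrounding records are simply the time a caller spent blocked on, and then holding, that named semaphore.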
[ 654.265287] env[61573]: DEBUG nova.compute.claims [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 654.265469] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.265680] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.278041] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 654.333633] env[61573]: DEBUG oslo_vmware.rw_handles [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 654.395604] env[61573]: DEBUG oslo_vmware.rw_handles [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 654.395604] env[61573]: DEBUG oslo_vmware.rw_handles [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 654.786496] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a909ac99-92f8-4a83-b064-382d64665a50 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.795942] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fa2975-1fdf-47f0-8a19-919aef619ba2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.828574] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e166bb-b7d3-42a2-aa3e-e88132899505 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.836854] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b68ced-dc8c-4fe1-8527-403acdd96e07 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.850526] env[61573]: DEBUG nova.compute.provider_tree [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.859920] env[61573]: DEBUG nova.scheduler.client.report [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.877105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.611s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.877660] env[61573]: ERROR nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.877660] env[61573]: Faults: ['InvalidArgument'] [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Traceback (most recent call last): [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self.driver.spawn(context, instance, image_meta, [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self._fetch_image_if_missing(context, vi) [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] image_cache(vi, tmp_image_ds_loc) [ 654.877660] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] vm_util.copy_virtual_disk( [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] session._wait_for_task(vmdk_copy_task) [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return self.wait_for_task(task_ref) [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return evt.wait() [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] result = hub.switch() [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] return self.greenlet.switch() [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 654.877965] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] self.f(*self.args, **self.kw) [ 654.878239] env[61573]: ERROR nova.compute.manager [instance: 
8725d6e7-57cc-4d55-b21a-4aee65d5228b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 654.878239] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] raise exceptions.translate_fault(task_info.error) [ 654.878239] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.878239] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Faults: ['InvalidArgument'] [ 654.878239] env[61573]: ERROR nova.compute.manager [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] [ 654.878500] env[61573]: DEBUG nova.compute.utils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 654.880049] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Build of instance 8725d6e7-57cc-4d55-b21a-4aee65d5228b was re-scheduled: A specified parameter was not correct: fileType [ 654.880049] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 654.880391] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 654.880572] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 654.880721] env[61573]: DEBUG nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 654.880884] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 655.339861] env[61573]: DEBUG nova.network.neutron [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.354679] env[61573]: INFO nova.compute.manager [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] [instance: 8725d6e7-57cc-4d55-b21a-4aee65d5228b] Took 0.47 seconds to deallocate network for instance. [ 655.489968] env[61573]: INFO nova.scheduler.client.report [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Deleted allocations for instance 8725d6e7-57cc-4d55-b21a-4aee65d5228b [ 655.515255] env[61573]: DEBUG oslo_concurrency.lockutils [None req-218d1b98-b092-4c5a-b580-7c7c351630ec tempest-ServersAdminNegativeTestJSON-1557566520 tempest-ServersAdminNegativeTestJSON-1557566520-project-member] Lock "8725d6e7-57cc-4d55-b21a-4aee65d5228b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.745s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.538482] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 655.598320] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.598575] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.600082] env[61573]: INFO nova.compute.claims [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.097263] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644c3b7e-c36f-4a59-8508-7d2522bc7c44 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.106009] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392740ce-39f4-416f-8d52-b71800693666 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.138730] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217f7acb-b46b-4202-8c3c-10f7f4ba31d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.147650] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5454bb05-ec23-4856-9c8f-bb1cf53fea67 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.162475] env[61573]: DEBUG nova.compute.provider_tree [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.172737] env[61573]: DEBUG nova.scheduler.client.report [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 656.190814] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.592s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.191703] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 656.230598] env[61573]: DEBUG nova.compute.utils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.231990] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 656.232273] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.245108] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 656.327785] env[61573]: DEBUG nova.policy [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0cae3eb2af48fc96305ec4f32941c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6179168f3e4f4df09875705a0ee778df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 656.332627] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 656.359288] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.359675] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.359675] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.359968] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.360133] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.360285] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.360494] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.360651] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
656.360929] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.361127] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.361305] env[61573]: DEBUG nova.virt.hardware [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.362239] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a611ce21-4a34-4323-a3e1-a30407d1173b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.372108] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f2f49f-92d1-4007-92b3-ef5c60c9a245 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.921496] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Successfully created port: 55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.870831] env[61573]: DEBUG nova.compute.manager [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Received event network-vif-plugged-55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 657.872041] env[61573]: DEBUG oslo_concurrency.lockutils [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] Acquiring lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.872041] env[61573]: DEBUG oslo_concurrency.lockutils [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.872041] env[61573]: DEBUG oslo_concurrency.lockutils [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.872041] env[61573]: DEBUG nova.compute.manager [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] No waiting events found dispatching network-vif-plugged-55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 657.872973] env[61573]: WARNING nova.compute.manager [req-aa0f4405-9a4e-4e84-8848-94bea98d8bae req-a949deee-498a-4190-9f12-c90f40b6f495 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Received unexpected event network-vif-plugged-55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac for instance with vm_state building and task_state spawning. [ 658.114144] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Successfully updated port: 55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.136555] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.136555] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.136555] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.204036] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.500424] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Updating instance_info_cache with network_info: [{"id": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "address": "fa:16:3e:af:38:3f", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55af1fec-a9", "ovs_interfaceid": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.517232] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.517563] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance network_info: |[{"id": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "address": "fa:16:3e:af:38:3f", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55af1fec-a9", "ovs_interfaceid": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.518056] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:38:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1c8fdf9-970c-4ae0-b6d9-f1015196b552', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.530848] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating folder: Project (6179168f3e4f4df09875705a0ee778df). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.530848] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a769d3a-62fc-4fb5-9d1c-053a4bf850c5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.542817] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created folder: Project (6179168f3e4f4df09875705a0ee778df) in parent group-v942801. [ 658.543069] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating folder: Instances. Parent ref: group-v942837. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.543329] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b2f7fea-969b-4816-8105-5dc9bffafc9a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.553698] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created folder: Instances in parent group-v942837. [ 658.553977] env[61573]: DEBUG oslo.service.loopingcall [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.554194] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 658.554449] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80c12a2e-a54e-48c5-bc85-67246000736c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.575127] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.575127] env[61573]: value = "task-4836753" [ 658.575127] env[61573]: _type = "Task" [ 658.575127] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.586382] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836753, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.088041] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836753, 'name': CreateVM_Task, 'duration_secs': 0.32806} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.088277] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 659.088847] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.089028] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.089346] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 659.089595] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d782d7c-2091-41d2-b1b7-d1f4e33911e6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.094640] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 659.094640] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f76740-243e-42fe-1253-915b914a6339" [ 659.094640] env[61573]: _type = "Task" [ 659.094640] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.103510] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f76740-243e-42fe-1253-915b914a6339, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.606470] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.606752] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.607102] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.086526] env[61573]: DEBUG nova.compute.manager [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Received event network-changed-55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 660.086729] env[61573]: DEBUG nova.compute.manager [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Refreshing instance network info cache due to event network-changed-55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 660.087121] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] Acquiring lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.087300] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] Acquired lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.087471] env[61573]: DEBUG nova.network.neutron [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Refreshing network info cache for port 55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 660.473589] env[61573]: DEBUG nova.network.neutron [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Updated VIF entry in instance network info cache for port 55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 660.473991] env[61573]: DEBUG nova.network.neutron [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Updating instance_info_cache with network_info: [{"id": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "address": "fa:16:3e:af:38:3f", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55af1fec-a9", "ovs_interfaceid": "55af1fec-a9d3-4c03-8da1-6d5fc43fd9ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.484224] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0ca6b89-8a1b-4859-8d67-1ae372661b71 req-ac981104-c935-4a1f-869b-cd020afc01d2 service nova] Releasing lock "refresh_cache-f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.232747] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] 
Acquiring lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.232747] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.473866] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.474236] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.474439] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.474439] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 677.404033] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.404251] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 677.404372] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 677.426695] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.426867] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.427135] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.427322] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.427375] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.428796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.428796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.428796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.428796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.428796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 677.430272] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 677.430272] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.430272] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.430272] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.430272] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.443931] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.444177] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.444343] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.444496] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 677.445595] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393e5eb-1fa8-4327-8107-0abf60d5f30a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.454690] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8449877-7bff-4b35-967c-bfb1e4731949 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.468971] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed28afe-33f7-4d89-842a-9cf612868293 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.476327] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-c31f78c1-9639-42c1-8c4e-723d59750739 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.505526] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180568MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 677.505691] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.505892] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.577971] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578230] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 933ffe74-6883-4676-9be8-f12e45be35e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578373] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance da95334f-ebbf-4a7f-8492-ca310028c4dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578496] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578617] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578734] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578849] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.578965] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.579092] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.579205] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 677.591095] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.601251] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.611161] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 549dbec8-1008-4930-8e2d-97cd11cfa45f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.621825] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fe988112-9ee6-4fa7-8914-87119aaa2549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.631802] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 91def60e-ad74-435f-a006-eb5672928010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.642178] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 643dbc96-7e32-4442-ab44-cbd74b0ffd72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.651867] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f42d10b1-6733-4c39-9230-fd41dd82bb20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.662355] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fbd512f6-5c22-4a40-846c-0fece5ee7bcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.677102] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 37256e1b-8191-4bee-a3c3-dd31c99d9301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.687382] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d0bfbba-2fec-4063-91f3-3152b3ea537e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.698511] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a8b2d4b2-731f-489a-8b11-c816e6827189 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.710679] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 65a29af2-4c3b-4556-ae95-adab1fa7aad4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.720903] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d55fe3b-3a7e-493d-b705-10241a2ac392 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.730529] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.741055] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79b5dc73-2168-46b5-8045-13e92666a69a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.750635] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 413e21a0-5b33-44d1-9964-82e3da294808 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.760682] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 02a2cbdf-c522-4ba5-9914-ccf1c51e0130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.771365] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1e843567-f13d-40fa-94a0-931655307053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.781033] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 97a1e2dc-9c4c-419d-87aa-839f21dc6e23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.790657] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12a02fd4-ae5d-4e7f-96e1-27d50d54bc24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.801595] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3fb9b32d-193f-4968-8783-54d25253f94f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.812333] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12cd88c0-09c1-42de-8fdd-03139718ca17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.822764] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.823018] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 677.823182] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_03ab001ffb3148e7bdb7b1d4dfa3ea88': '1', 'io_workload': '10', 'num_proj_3aabf5c6a1ed4a799a8d230cb0e37107': '1', 'num_proj_ad80964934624fed809a3285c0f1b748': '1', 'num_proj_5a9aa4dbe7894ffebca41f816a4eb2da': '1', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '2', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 678.230960] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fa6192-415e-4721-9798-eac28b9fb24a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.239043] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f61b916-a482-47f6-886b-55caab329d5b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.270416] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2065e2b0-83af-4a51-b4bd-ff77ddf8165a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.278774] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce954f96-823b-4935-a502-79dcd7066fa8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.292736] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.301281] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.314732] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 678.316033] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.809s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.309957] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.472489] env[61573]: WARNING oslo_vmware.rw_handles [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 701.472489] env[61573]: ERROR oslo_vmware.rw_handles [ 701.473097] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 701.474487] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 701.474736] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 
tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Copying Virtual Disk [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/ed057f3d-f414-4210-9317-39c57d3e4e4e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 701.475107] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3935e2f-fb95-4af8-bee4-6c382db49ed1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.483881] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Waiting for the task: (returnval){ [ 701.483881] env[61573]: value = "task-4836754" [ 701.483881] env[61573]: _type = "Task" [ 701.483881] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.492525] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Task: {'id': task-4836754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.995042] env[61573]: DEBUG oslo_vmware.exceptions [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 701.995042] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.995042] env[61573]: ERROR nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.995042] env[61573]: Faults: ['InvalidArgument'] [ 701.995042] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Traceback (most recent call last): [ 701.995042] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 701.995042] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] yield resources [ 701.995042] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self.driver.spawn(context, instance, image_meta, [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self._fetch_image_if_missing(context, vi) [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] image_cache(vi, tmp_image_ds_loc) [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] vm_util.copy_virtual_disk( [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] session._wait_for_task(vmdk_copy_task) [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 701.995484] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return self.wait_for_task(task_ref) [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return evt.wait() [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] result = hub.switch() [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return self.greenlet.switch() [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self.f(*self.args, **self.kw) [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] raise exceptions.translate_fault(task_info.error) [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Faults: ['InvalidArgument'] [ 701.995945] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] [ 701.996337] env[61573]: INFO nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Terminating instance [ 701.996949] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.997170] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.997418] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04419ce0-71b0-4d20-b49c-5796adb1d918 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.999637] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 701.999827] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 702.000601] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec59b714-eae0-429b-8dfc-94f5d67ef4ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.007422] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 702.007651] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6c03743-a056-4c79-8891-21f3120c63e8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.010043] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.010238] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 702.011278] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f0b93b-b2f5-42c2-80d8-2f2a6f5d1d6e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.015879] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for the task: (returnval){ [ 702.015879] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]522995f0-3da1-7f38-5a75-5a5a55003981" [ 702.015879] env[61573]: _type = "Task" [ 702.015879] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.027652] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]522995f0-3da1-7f38-5a75-5a5a55003981, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.078450] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 702.078620] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 702.078782] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Deleting the datastore file [datastore2] 933ffe74-6883-4676-9be8-f12e45be35e0 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.079120] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58f4364b-983d-40ec-a356-574ba4122253 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.086846] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Waiting for the task: (returnval){ [ 702.086846] env[61573]: value = "task-4836756" [ 702.086846] env[61573]: _type = "Task" [ 702.086846] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.095741] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Task: {'id': task-4836756, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.527897] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 702.528305] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Creating directory with path [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.528667] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5a9baaa-438a-43a0-9826-ccb5f1404a03 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.542050] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Created directory with path [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.542050] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Fetch image to [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 702.542050] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 702.542511] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50dfef9-9ed5-45e4-893f-c2ed7f0b6524 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.550084] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eb7d74-fe10-4754-a16b-ad5d18067905 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.560171] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4515075f-756f-42b4-94ac-eab63482ca9c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.597097] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f470fc0-8010-47c2-89d2-85ff735ca480 {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.605712] env[61573]: DEBUG oslo_vmware.api [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Task: {'id': task-4836756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076537} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.607396] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 702.608021] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 702.608021] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 702.608149] env[61573]: INFO nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 702.609869] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3f724483-e9f0-4c8a-a392-979ddc5785c3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.611962] env[61573]: DEBUG nova.compute.claims [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 702.612152] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.612363] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.635404] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 702.803996] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 702.863466] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 702.863770] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 703.137593] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e73bc4-7832-4718-9aa8-5c4dc341f480 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.145504] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9529f0-f620-4099-a35b-e3ce2662b4c6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.176642] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee463c4-80bd-4790-bf17-58af200fee24 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.184854] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e1a99b-cb48-41ab-a556-9b59e2ab6bc0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.198419] env[61573]: DEBUG nova.compute.provider_tree [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.207953] env[61573]: DEBUG nova.scheduler.client.report [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.225395] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.613s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.225958] env[61573]: ERROR nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 703.225958] env[61573]: Faults: ['InvalidArgument'] [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Traceback (most recent call last): [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self.driver.spawn(context, instance, image_meta, [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self._fetch_image_if_missing(context, vi) [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] image_cache(vi, tmp_image_ds_loc) [ 703.225958] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] vm_util.copy_virtual_disk( [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] session._wait_for_task(vmdk_copy_task) [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return self.wait_for_task(task_ref) [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return evt.wait() [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] result = hub.switch() [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] return self.greenlet.switch() [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 703.226346] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] self.f(*self.args, **self.kw) [ 703.226666] env[61573]: ERROR 
nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 703.226666] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] raise exceptions.translate_fault(task_info.error) [ 703.226666] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 703.226666] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Faults: ['InvalidArgument'] [ 703.226666] env[61573]: ERROR nova.compute.manager [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] [ 703.226666] env[61573]: DEBUG nova.compute.utils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 703.228164] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Build of instance 933ffe74-6883-4676-9be8-f12e45be35e0 was re-scheduled: A specified parameter was not correct: fileType [ 703.228164] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 703.228543] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 703.228710] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 703.228886] env[61573]: DEBUG nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 703.229058] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.760857] env[61573]: DEBUG nova.network.neutron [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.774967] env[61573]: INFO nova.compute.manager [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] [instance: 933ffe74-6883-4676-9be8-f12e45be35e0] Took 0.54 seconds to deallocate network for instance. [ 703.879983] env[61573]: INFO nova.scheduler.client.report [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Deleted allocations for instance 933ffe74-6883-4676-9be8-f12e45be35e0 [ 703.904078] env[61573]: DEBUG oslo_concurrency.lockutils [None req-52fbd7d2-ac85-4e8d-a218-2f4b2ffc8919 tempest-FloatingIPsAssociationNegativeTestJSON-733145256 tempest-FloatingIPsAssociationNegativeTestJSON-733145256-project-member] Lock "933ffe74-6883-4676-9be8-f12e45be35e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.464s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.921395] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 703.971942] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.972790] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.973977] env[61573]: INFO nova.compute.claims [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.429728] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a84b03-36b1-426d-a768-ff7419c83d0f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.438197] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0629bd-eef0-4d9d-bb03-7df53f30059c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.469577] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf156d-d2f3-4f83-be4f-8f32b493127a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.478243] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d748786f-4c7a-4f40-851c-5296d3ef4b3a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.492329] env[61573]: DEBUG nova.compute.provider_tree [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.502782] env[61573]: DEBUG nova.scheduler.client.report [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.517550] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.545s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.518102] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 704.559577] env[61573]: DEBUG nova.compute.utils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.561208] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 704.561423] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 704.570035] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 704.640364] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 704.644013] env[61573]: DEBUG nova.policy [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0cae3eb2af48fc96305ec4f32941c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6179168f3e4f4df09875705a0ee778df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 704.669757] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.670051] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.670233] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.670420] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.670567] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.670715] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
704.670922] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.671123] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.671301] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.671468] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.671643] env[61573]: DEBUG nova.virt.hardware [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.672540] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4c4b7c-b865-4ff3-b51f-e2ed42bc48b8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.681236] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b610cb6-abb3-4f88-af0e-ca330d306c5c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.099279] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Successfully created port: ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.197726] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Successfully updated port: ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.212523] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.212722] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.212722] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.277432] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.382862] env[61573]: DEBUG nova.compute.manager [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Received event network-vif-plugged-ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 706.383101] env[61573]: DEBUG oslo_concurrency.lockutils [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] Acquiring lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.383341] env[61573]: DEBUG oslo_concurrency.lockutils [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.383510] env[61573]: DEBUG oslo_concurrency.lockutils [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.384102] env[61573]: DEBUG nova.compute.manager [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] No waiting events found dispatching network-vif-plugged-ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 706.384396] env[61573]: WARNING nova.compute.manager [req-b29144ce-366b-431a-912a-badb8a276b78 req-5155c5db-1914-47e2-9a50-e89c88408334 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Received unexpected event network-vif-plugged-ea4f90aa-5fbc-4854-b454-05f36598fc2f for instance with vm_state building and task_state spawning. 
[ 706.557165] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Updating instance_info_cache with network_info: [{"id": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "address": "fa:16:3e:91:06:45", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea4f90aa-5f", "ovs_interfaceid": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.567377] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.569746] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance network_info: |[{"id": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "address": "fa:16:3e:91:06:45", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea4f90aa-5f", "ovs_interfaceid": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.570638] env[61573]: DEBUG 
nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:06:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1c8fdf9-970c-4ae0-b6d9-f1015196b552', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea4f90aa-5fbc-4854-b454-05f36598fc2f', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 706.578362] env[61573]: DEBUG oslo.service.loopingcall [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 706.578952] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 706.579295] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52592d72-01d0-4dd7-a23b-cd0b870b28dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 706.602578] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 706.602578] env[61573]: value = "task-4836757"
[ 706.602578] env[61573]: _type = "Task"
[ 706.602578] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 706.616225] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836757, 'name': CreateVM_Task} progress is 5%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 707.115875] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836757, 'name': CreateVM_Task, 'duration_secs': 0.307094} completed successfully.
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.116166] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 707.117056] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.117282] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.117696] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.117984] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29d1e4c6-7d8f-4ca1-b5a1-9a16636a0eff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.124161] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 707.124161] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5295e1d0-88ba-7cb7-6871-9f70d1590d53" [ 707.124161] env[61573]: _type = "Task" [ 707.124161] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.135785] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5295e1d0-88ba-7cb7-6871-9f70d1590d53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.634917] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.635333] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.635397] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.489889] env[61573]: DEBUG nova.compute.manager [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Received event network-changed-ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 708.490605] env[61573]: DEBUG nova.compute.manager [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Refreshing instance network info cache due to event network-changed-ea4f90aa-5fbc-4854-b454-05f36598fc2f. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 708.490605] env[61573]: DEBUG oslo_concurrency.lockutils [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] Acquiring lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.490605] env[61573]: DEBUG oslo_concurrency.lockutils [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] Acquired lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.490763] env[61573]: DEBUG nova.network.neutron [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Refreshing network info cache for port ea4f90aa-5fbc-4854-b454-05f36598fc2f {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.895940] env[61573]: DEBUG nova.network.neutron [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Updated VIF entry in instance network info cache for port ea4f90aa-5fbc-4854-b454-05f36598fc2f. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 708.896326] env[61573]: DEBUG nova.network.neutron [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Updating instance_info_cache with network_info: [{"id": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "address": "fa:16:3e:91:06:45", "network": {"id": "b06d727b-8d79-4348-94ce-318da6a651e8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-976768313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6179168f3e4f4df09875705a0ee778df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea4f90aa-5f", "ovs_interfaceid": "ea4f90aa-5fbc-4854-b454-05f36598fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.907036] env[61573]: DEBUG oslo_concurrency.lockutils [req-e990dccf-76e6-4b25-a82a-330ae5a30674 req-ef46189d-761c-4c05-970d-a8db45616ce8 service nova] Releasing lock "refresh_cache-277ddab5-2fef-4c64-ab26-22f1be2ca4f8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.786263] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "36a5ff6d-5123-4323-8e86-3529828af0ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.786619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.188573] env[61573]: DEBUG oslo_concurrency.lockutils [None req-96c9dd6a-c590-4347-a8ae-e1730a40a27b tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "46dd382d-f6d0-4bd5-b027-c07d01a9a595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.188937] env[61573]: DEBUG oslo_concurrency.lockutils [None req-96c9dd6a-c590-4347-a8ae-e1730a40a27b tempest-ListImageFiltersTestJSON-380092424 
tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "46dd382d-f6d0-4bd5-b027-c07d01a9a595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.399121] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.426138] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.426342] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 737.405116] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.405116] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.405116] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.403646] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.403907] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.415645] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.415927] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.416046] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.416185] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 738.417273] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb4ffa1-189f-4aea-82cf-05d6678c5992 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.427536] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e58589-9dad-4e61-9a71-82636dbedf84 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.441559] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f55838-7d94-4a7a-9205-5334377c46f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.448220] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93944cf-8aa8-4607-ab7a-a31a7d294215 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.477066] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180513MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 738.477231] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.477427] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.555980] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556109] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance da95334f-ebbf-4a7f-8492-ca310028c4dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556242] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556366] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556483] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556597] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556711] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556821] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.556931] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.557051] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 738.570859] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.581716] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 549dbec8-1008-4930-8e2d-97cd11cfa45f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.592146] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fe988112-9ee6-4fa7-8914-87119aaa2549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.603200] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 91def60e-ad74-435f-a006-eb5672928010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.615280] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 643dbc96-7e32-4442-ab44-cbd74b0ffd72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.625113] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f42d10b1-6733-4c39-9230-fd41dd82bb20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.638457] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance fbd512f6-5c22-4a40-846c-0fece5ee7bcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.648521] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 37256e1b-8191-4bee-a3c3-dd31c99d9301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.659578] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d0bfbba-2fec-4063-91f3-3152b3ea537e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.672576] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a8b2d4b2-731f-489a-8b11-c816e6827189 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.686419] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 65a29af2-4c3b-4556-ae95-adab1fa7aad4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.699810] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d55fe3b-3a7e-493d-b705-10241a2ac392 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.710318] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.721349] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79b5dc73-2168-46b5-8045-13e92666a69a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.733886] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 413e21a0-5b33-44d1-9964-82e3da294808 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.744757] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 02a2cbdf-c522-4ba5-9914-ccf1c51e0130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.755108] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1e843567-f13d-40fa-94a0-931655307053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.770837] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 97a1e2dc-9c4c-419d-87aa-839f21dc6e23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.781764] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12a02fd4-ae5d-4e7f-96e1-27d50d54bc24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.794416] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3fb9b32d-193f-4968-8783-54d25253f94f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.804784] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12cd88c0-09c1-42de-8fdd-03139718ca17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.814985] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.828498] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.838641] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 46dd382d-f6d0-4bd5-b027-c07d01a9a595 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 738.838895] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 738.839069] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_03ab001ffb3148e7bdb7b1d4dfa3ea88': '1', 'io_workload': '10', 'num_proj_ad80964934624fed809a3285c0f1b748': '1', 'num_proj_5a9aa4dbe7894ffebca41f816a4eb2da': '1', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '2', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 739.255868] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdc79f5-9a83-4808-a170-1c7d5ec8de75 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.263806] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52836559-7b8e-4544-9ff3-677f9bcd5f21 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.293053] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d923b39-dd78-459a-8414-856afe501753 {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.300918] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44ace61-ddfb-4f96-925d-399e5a0dbb0d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.315461] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.325009] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.338931] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 739.339120] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.862s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.335029] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.335029] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.335404] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 740.335404] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 740.359233] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.359415] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.359547] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.359672] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.359796] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.359933] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.360070] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.360281] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.360424] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.360594] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 740.360727] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 740.361216] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.042470] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.783304] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.490504] env[61573]: WARNING oslo_vmware.rw_handles [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 751.490504] env[61573]: ERROR oslo_vmware.rw_handles [ 751.491131] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 751.492695] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Caching image 
{{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 751.492952] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Copying Virtual Disk [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/de40d851-4ac6-4021-8989-e4f8e4e387d5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 751.493273] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ea22057-4940-4695-b04c-448bb610661c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.502757] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for the task: (returnval){ [ 751.502757] env[61573]: value = "task-4836758" [ 751.502757] env[61573]: _type = "Task" [ 751.502757] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.511780] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Task: {'id': task-4836758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.016308] env[61573]: DEBUG oslo_vmware.exceptions [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 752.016308] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.016308] env[61573]: ERROR nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 752.016308] env[61573]: Faults: ['InvalidArgument'] [ 752.016308] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Traceback (most recent call last): [ 752.016308] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 752.016308] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] yield resources [ 752.016308] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 752.016308] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self.driver.spawn(context, instance, image_meta, [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self._fetch_image_if_missing(context, vi) [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] image_cache(vi, tmp_image_ds_loc) [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] vm_util.copy_virtual_disk( [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] session._wait_for_task(vmdk_copy_task) [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return self.wait_for_task(task_ref) [ 752.016656] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return evt.wait() [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] result = hub.switch() [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return self.greenlet.switch() [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self.f(*self.args, **self.kw) [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] raise exceptions.translate_fault(task_info.error) [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Faults: ['InvalidArgument'] [ 752.017040] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] [ 752.017393] env[61573]: INFO nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Terminating instance [ 752.017393] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.017393] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.017393] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e3a7057-582b-40ae-aecf-f404e53bea7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.019476] 
env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 752.019667] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 752.020487] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9361f2-5b50-4347-b28f-6d899d3e33cb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.029304] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 752.029588] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4606799-8f04-460e-a13a-572d0d3864be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.032191] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.032191] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 752.033125] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9d4bf99-ae70-4440-9783-520d4c663e22 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.038431] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for the task: (returnval){ [ 752.038431] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5296731e-864b-90db-04d3-f4af23cc9fda" [ 752.038431] env[61573]: _type = "Task" [ 752.038431] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.046805] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5296731e-864b-90db-04d3-f4af23cc9fda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.103934] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 752.104145] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 752.104331] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Deleting the datastore file [datastore2] 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.104605] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f14a90b3-958d-49f2-a21b-752ccac97acc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.111184] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for the task: (returnval){ [ 752.111184] env[61573]: value = "task-4836760" [ 752.111184] env[61573]: _type = "Task" [ 752.111184] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.119269] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Task: {'id': task-4836760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.551246] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 752.551246] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Creating directory with path [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.551246] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd50fd1c-8c3f-4b76-b4f4-8ac8a38747ed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.561124] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Created directory with path [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.561400] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Fetch image to [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 752.561635] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 752.562398] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556e1da9-4b6b-4d54-bbab-316024793488 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.569268] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b75f8d5-3ba8-4c53-92cc-997f03affad1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.578639] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ee657f-6d2d-4cb1-8926-5df4ea32078f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.610108] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e55878a-e651-4b91-adb8-989aefafa7be {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.622197] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75544154-f19b-4247-9b35-e290c3d8ed48 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.623970] env[61573]: DEBUG oslo_vmware.api [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Task: {'id': task-4836760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080592} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.624232] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.624412] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 752.624580] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 752.624746] env[61573]: INFO nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Took 0.61 seconds to destroy the instance on the hypervisor. 
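The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: a *_Task method is invoked against a vCenter managed object, and the returned task is then polled with wait_for_task until it completes or raises a translated fault (as the InvalidArgument fault earlier shows). A minimal sketch of that pattern; the vCenter host, credentials and datastore paths below are placeholders, not values from this deployment:

    # Sketch only: drive a vCenter task the way the log entries above do.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore2] vmware_temp/example/example.vmdk')

    # wait_for_task polls the task object (the "progress is 0%" lines) and
    # raises a translated VimFaultException if the task errors out.
    session.wait_for_task(task)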
[ 752.626827] env[61573]: DEBUG nova.compute.claims [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.627010] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.627235] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.727544] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 752.798592] env[61573]: DEBUG oslo_vmware.rw_handles [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 752.858567] env[61573]: DEBUG oslo_vmware.rw_handles [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 752.858763] env[61573]: DEBUG oslo_vmware.rw_handles [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 753.167748] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1caca43-1580-465a-9856-555fe768580d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.177615] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd44cc6-c47e-412d-a6cf-519add1c48cf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.206912] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fa9d59-6357-443b-bba8-a0e53229cd6c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.214222] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50a7525-ba5f-4337-a849-fb17b5edfdf0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.227227] env[61573]: DEBUG nova.compute.provider_tree [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.235910] env[61573]: DEBUG nova.scheduler.client.report [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.254109] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.627s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.254634] env[61573]: ERROR nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 753.254634] env[61573]: Faults: ['InvalidArgument'] [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Traceback (most recent call last): [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 753.254634] env[61573]: ERROR nova.compute.manager 
[instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self.driver.spawn(context, instance, image_meta, [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self._fetch_image_if_missing(context, vi) [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] image_cache(vi, tmp_image_ds_loc) [ 753.254634] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] vm_util.copy_virtual_disk( [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] session._wait_for_task(vmdk_copy_task) [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return self.wait_for_task(task_ref) [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return evt.wait() [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] result = hub.switch() [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] return self.greenlet.switch() [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 753.255026] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] self.f(*self.args, **self.kw) [ 753.255453] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 753.255453] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] raise exceptions.translate_fault(task_info.error) [ 753.255453] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 753.255453] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Faults: ['InvalidArgument'] [ 753.255453] env[61573]: ERROR nova.compute.manager [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] [ 753.255453] env[61573]: DEBUG nova.compute.utils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 753.256948] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Build of instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af was re-scheduled: A specified parameter was not correct: fileType [ 753.256948] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 753.257337] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 753.257513] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 753.257849] env[61573]: DEBUG nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 753.258028] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 753.752116] env[61573]: DEBUG nova.network.neutron [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.763982] env[61573]: INFO nova.compute.manager [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Took 0.51 seconds to deallocate network for instance. [ 753.868774] env[61573]: INFO nova.scheduler.client.report [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Deleted allocations for instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af [ 753.894029] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f102938-05f9-495a-9acc-2aa38aa7bd25 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.848s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.897021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.852s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.897021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Acquiring lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.897021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.897309] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.897545] env[61573]: INFO nova.compute.manager [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Terminating instance [ 753.899720] env[61573]: DEBUG nova.compute.manager [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 753.899913] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 753.900186] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78a07ca9-e1ac-472b-9979-40c62c5a9b65 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.911802] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243f7bd6-daa6-440f-8f33-648b9d537ab2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.924117] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 753.947281] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af could not be found. [ 753.947496] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 753.947673] env[61573]: INFO nova.compute.manager [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Took 0.05 seconds to destroy the instance on the hypervisor. 
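The "Acquiring lock" / "acquired" / "released" lines around the build and terminate paths come from oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in the entries above), which also records how long a caller waited for the lock and how long it was held. A rough sketch of the pattern that produces them; the lock name and function body here are placeholders:

    # Sketch only: guard a critical section with an oslo.concurrency lock.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the "compute_resources" lock held; the wrapper logs the
        # acquire/release messages seen in this log.
        pass

    update_available_resource()

    # The same thing as a context manager:
    with lockutils.lock('compute_resources'):
        pass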
[ 753.947937] env[61573]: DEBUG oslo.service.loopingcall [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.948174] env[61573]: DEBUG nova.compute.manager [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 753.948271] env[61573]: DEBUG nova.network.neutron [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 753.973672] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.973931] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.975439] env[61573]: INFO nova.compute.claims [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.986799] env[61573]: DEBUG nova.network.neutron [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.995211] env[61573]: INFO nova.compute.manager [-] [instance: 72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af] Took 0.05 seconds to deallocate network for instance. 
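The inventory data logged repeatedly for provider b1eff98b-2b30-4574-a87d-d151235a2dba carries total, reserved and allocation_ratio per resource class; placement derives the schedulable capacity from those fields as roughly (total - reserved) * allocation_ratio. A small worked example using the figures from this log:

    # Sketch only: capacity arithmetic for the inventory reported above/below.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200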
[ 754.118411] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d349a0f-5984-4b33-a0f9-778abc99d462 tempest-ServerDiagnosticsTest-730848149 tempest-ServerDiagnosticsTest-730848149-project-member] Lock "72dd6ff6-43bb-4b4e-9d3d-7d89ad4d54af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.223s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.469260] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc15454-6f6b-4380-8680-fc7e18fe880e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.479781] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3be6aec-77fd-4933-b71d-f9903bd54580 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.514206] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d169b40f-87de-499a-bd86-45ab31df8863 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.523062] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c33768b-cae4-4677-b2aa-cd5b27ae924a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.536186] env[61573]: DEBUG nova.compute.provider_tree [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.547960] env[61573]: DEBUG nova.scheduler.client.report [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.566354] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.592s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.566662] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 754.603296] env[61573]: DEBUG nova.compute.utils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.604678] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 754.604906] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.615083] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 754.686861] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 754.706461] env[61573]: DEBUG nova.policy [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233627a9649f45539ba8d8521ca7c9ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58b3da30203f416e9b30c28a291d7b4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 754.719103] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow 
threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.719103] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.719103] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.719296] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.719431] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.719624] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.719864] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.720087] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 754.720312] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.720548] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.720901] env[61573]: DEBUG nova.virt.hardware [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.722287] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6a55dc-91ac-4019-abc5-2dc1a0cb004b {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.731322] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9492c7-21b3-4668-a1f9-43245b253f10 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.289544] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Successfully created port: 1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.188793] env[61573]: DEBUG nova.compute.manager [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Received event network-vif-plugged-1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 756.188793] env[61573]: DEBUG oslo_concurrency.lockutils [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] Acquiring lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.188793] env[61573]: DEBUG oslo_concurrency.lockutils [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.188793] env[61573]: DEBUG oslo_concurrency.lockutils [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.188942] env[61573]: DEBUG nova.compute.manager [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] No waiting events found dispatching network-vif-plugged-1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 756.188942] env[61573]: WARNING nova.compute.manager [req-2ef6929d-a116-4b58-af5d-e53fec754a8d req-1b28e658-07f0-4140-a118-0d2eb30d1e39 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Received unexpected event network-vif-plugged-1b88a206-aa18-4d1a-b1b0-923bea7ae63b for instance with vm_state building and task_state spawning. 
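The nova.virt.hardware DEBUG lines above (flavor/image limits 0:0:0, "Got 1 possible topologies", "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") trace the CPU-topology selection for the single-vCPU m1.nano flavor: every (sockets, cores, threads) split whose product equals the vCPU count and fits under the 65536 limits is enumerated, then sorted against the (empty) preferences. Below is a simplified, self-contained sketch of that enumeration step only, using stand-in names, not Nova's real nova.virt.hardware functions.

from collections import namedtuple

# Illustrative stand-in for the enumeration traced by
# _get_possible_cpu_topologies; names and defaults here are assumptions.
VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) split whose product is vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single candidate
# reported as "Possible topologies" in the trace above.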
[ 756.214270] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Successfully updated port: 1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.230105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.230105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquired lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.230105] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.291613] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.584628] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Updating instance_info_cache with network_info: [{"id": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "address": "fa:16:3e:2c:2d:f2", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b88a206-aa", "ovs_interfaceid": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.598851] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 
tempest-MigrationsAdminTest-352573817-project-member] Releasing lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.599216] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance network_info: |[{"id": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "address": "fa:16:3e:2c:2d:f2", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b88a206-aa", "ovs_interfaceid": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.600829] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:2d:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b88a206-aa18-4d1a-b1b0-923bea7ae63b', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.607766] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Creating folder: Project (58b3da30203f416e9b30c28a291d7b4e). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.607766] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c3b97a3-2792-4243-9d09-8411d220c345 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.681591] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Created folder: Project (58b3da30203f416e9b30c28a291d7b4e) in parent group-v942801. [ 756.681716] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Creating folder: Instances. Parent ref: group-v942841. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.683026] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0926690a-cafd-45d8-ad43-f9d9f7508013 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.691388] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Created folder: Instances in parent group-v942841. [ 756.691672] env[61573]: DEBUG oslo.service.loopingcall [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.691867] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 756.692088] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed467820-dc17-452b-9043-d3cbe8f58cbf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.713427] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.713427] env[61573]: value = "task-4836763" [ 756.713427] env[61573]: _type = "Task" [ 756.713427] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.722285] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836763, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.224837] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836763, 'name': CreateVM_Task, 'duration_secs': 0.304502} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.225080] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 757.226093] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.226315] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.226647] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 757.226911] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33917352-2452-4f0c-8716-69005bb80046 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.232916] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Waiting for the task: (returnval){ [ 757.232916] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5218366e-4e31-f7f9-c573-c10bcf8e3d03" [ 757.232916] env[61573]: _type = "Task" [ 757.232916] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.242983] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5218366e-4e31-f7f9-c573-c10bcf8e3d03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.275587] env[61573]: DEBUG oslo_concurrency.lockutils [None req-950ce301-d7c2-4e38-8710-57b5fc928e59 tempest-TenantUsagesTestJSON-2084051121 tempest-TenantUsagesTestJSON-2084051121-project-member] Acquiring lock "05122510-519e-43f3-96bf-51801559a5be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.275815] env[61573]: DEBUG oslo_concurrency.lockutils [None req-950ce301-d7c2-4e38-8710-57b5fc928e59 tempest-TenantUsagesTestJSON-2084051121 tempest-TenantUsagesTestJSON-2084051121-project-member] Lock "05122510-519e-43f3-96bf-51801559a5be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.744565] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.744920] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.745042] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.246439] env[61573]: DEBUG nova.compute.manager [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Received event network-changed-1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 758.246677] env[61573]: DEBUG nova.compute.manager [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Refreshing instance network info cache due to event network-changed-1b88a206-aa18-4d1a-b1b0-923bea7ae63b. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 758.246908] env[61573]: DEBUG oslo_concurrency.lockutils [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] Acquiring lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.247156] env[61573]: DEBUG oslo_concurrency.lockutils [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] Acquired lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.248023] env[61573]: DEBUG nova.network.neutron [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Refreshing network info cache for port 1b88a206-aa18-4d1a-b1b0-923bea7ae63b {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 758.588200] env[61573]: DEBUG nova.network.neutron [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Updated VIF entry in instance network info cache for port 1b88a206-aa18-4d1a-b1b0-923bea7ae63b. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 758.588200] env[61573]: DEBUG nova.network.neutron [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Updating instance_info_cache with network_info: [{"id": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "address": "fa:16:3e:2c:2d:f2", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b88a206-aa", "ovs_interfaceid": "1b88a206-aa18-4d1a-b1b0-923bea7ae63b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.597918] env[61573]: DEBUG oslo_concurrency.lockutils [req-db9d0ae8-bcfd-45a5-971b-e1f86c0ac41f req-89b17c6d-6006-4c33-bfe8-f214846910d5 service nova] Releasing lock "refresh_cache-8c57d493-12c4-47fe-a355-c9ade98b7158" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.085303] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock 
"9cb4ec1b-e422-491d-8ee9-0103c740ea5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.570373] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.056610] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.490759] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.920777] env[61573]: DEBUG oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.496392] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "8c57d493-12c4-47fe-a355-c9ade98b7158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.109791] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.015876] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.016237] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 
tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.154759] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d0550374-d99f-4373-9a88-7d2fda0a3bcd tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "79a02472-a364-42f6-9fe0-c030df8436b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.154759] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d0550374-d99f-4373-9a88-7d2fda0a3bcd tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "79a02472-a364-42f6-9fe0-c030df8436b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.404050] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.404050] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 795.420913] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 795.421148] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.421383] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 795.447280] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.480622] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d49b678-8da3-42ff-b97a-20a86535fb5e tempest-ServersAaction247Test-1702220296 tempest-ServersAaction247Test-1702220296-project-member] Acquiring lock "80e12c49-e98a-4ece-a080-783f99fccabc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.480795] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d49b678-8da3-42ff-b97a-20a86535fb5e tempest-ServersAaction247Test-1702220296 tempest-ServersAaction247Test-1702220296-project-member] 
Lock "80e12c49-e98a-4ece-a080-783f99fccabc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.653867] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f40cc3fd-3490-48ab-8e08-202fc3cc4f27 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "57358b9b-2bf1-47d9-a28c-7d45770604d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.654117] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f40cc3fd-3490-48ab-8e08-202fc3cc4f27 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "57358b9b-2bf1-47d9-a28c-7d45770604d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.462542] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.462542] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 798.260401] env[61573]: DEBUG oslo_concurrency.lockutils [None req-da3bd14a-c736-477d-86bd-ea11a1c316d4 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Acquiring lock "5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.260401] env[61573]: DEBUG oslo_concurrency.lockutils [None req-da3bd14a-c736-477d-86bd-ea11a1c316d4 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Lock "5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.403313] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.403639] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.403860] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.416274] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.416567] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.416739] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.416926] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 798.418606] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65d85e9-bd5c-4af4-8844-aae4d52d61dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.428662] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf61901-a3c4-4b5a-b2d0-ac9b3e76d03a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.446843] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f50ef2-18ce-49a6-a6a9-6c91fbc5dd02 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.458036] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bcd918-5dbf-4e60-a75a-f7fbca45138a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.493016] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180565MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 798.493452] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.493870] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.612776] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance da95334f-ebbf-4a7f-8492-ca310028c4dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612776] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612776] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612776] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612944] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612944] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612944] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.612944] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.613082] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.613082] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 798.628530] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 65a29af2-4c3b-4556-ae95-adab1fa7aad4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.644990] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d55fe3b-3a7e-493d-b705-10241a2ac392 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.658282] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.674733] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79b5dc73-2168-46b5-8045-13e92666a69a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.689391] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 413e21a0-5b33-44d1-9964-82e3da294808 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.705104] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 02a2cbdf-c522-4ba5-9914-ccf1c51e0130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.717611] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1e843567-f13d-40fa-94a0-931655307053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.758298] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 97a1e2dc-9c4c-419d-87aa-839f21dc6e23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.771783] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12a02fd4-ae5d-4e7f-96e1-27d50d54bc24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.788938] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3fb9b32d-193f-4968-8783-54d25253f94f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.809120] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 12cd88c0-09c1-42de-8fdd-03139718ca17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.823365] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.837209] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.849286] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 46dd382d-f6d0-4bd5-b027-c07d01a9a595 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.863775] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 05122510-519e-43f3-96bf-51801559a5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.876424] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.888866] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79a02472-a364-42f6-9fe0-c030df8436b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.902143] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 80e12c49-e98a-4ece-a080-783f99fccabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.917491] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 57358b9b-2bf1-47d9-a28c-7d45770604d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.932416] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.932684] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 798.932851] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '4', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_ad80964934624fed809a3285c0f1b748': '1', 'io_workload': '10', 'num_proj_5a9aa4dbe7894ffebca41f816a4eb2da': '1', 'num_task_spawning': '2', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '2', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 799.446791] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0b4b57-50f9-4762-95b8-acfaca19983b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.461465] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a49f10-cb78-4e4d-a08f-0c32c18cfbdb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.505188] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bc2582-309e-4e37-98bb-1af9c04a1ece {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.513977] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d69e37d-7546-49fe-90d1-a0d061f2bba5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.532016] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.545988] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.575905] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 799.576917] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.083s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.501502] env[61573]: DEBUG oslo_concurrency.lockutils [None req-05f06731-ecad-4b60-8411-5a9f29749070 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Acquiring lock "5e637ba3-4faf-45f0-9454-dc38d14756c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.503113] env[61573]: DEBUG oslo_concurrency.lockutils [None req-05f06731-ecad-4b60-8411-5a9f29749070 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Lock "5e637ba3-4faf-45f0-9454-dc38d14756c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.577474] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.577825] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.578030] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.578218] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.081228] env[61573]: WARNING oslo_vmware.rw_handles [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles Traceback 
(most recent call last): [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 801.081228] env[61573]: ERROR oslo_vmware.rw_handles [ 801.081228] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 801.081819] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 801.081858] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Copying Virtual Disk [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/c7619abe-9961-4415-8081-14cc3e13924f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 801.082193] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c221a1fb-065c-44be-89fc-53bb59227d45 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.092052] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for the task: (returnval){ [ 801.092052] env[61573]: value = "task-4836764" [ 801.092052] env[61573]: _type = "Task" [ 801.092052] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.103020] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Task: {'id': task-4836764, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.403982] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.404184] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 801.404309] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 801.431922] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432128] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432267] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432392] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432513] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432633] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432750] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432868] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.432985] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.433114] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 801.433241] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 801.603453] env[61573]: DEBUG oslo_vmware.exceptions [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 801.603754] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.604350] env[61573]: ERROR nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 801.604350] env[61573]: Faults: ['InvalidArgument'] [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Traceback (most recent call last): [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] yield resources [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self.driver.spawn(context, instance, image_meta, [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] 
self._fetch_image_if_missing(context, vi) [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 801.604350] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] image_cache(vi, tmp_image_ds_loc) [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] vm_util.copy_virtual_disk( [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] session._wait_for_task(vmdk_copy_task) [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return self.wait_for_task(task_ref) [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return evt.wait() [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] result = hub.switch() [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return self.greenlet.switch() [ 801.604781] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self.f(*self.args, **self.kw) [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] raise exceptions.translate_fault(task_info.error) [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Faults: ['InvalidArgument'] [ 801.605154] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] [ 801.605154] env[61573]: INFO nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 
tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Terminating instance [ 801.609856] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 801.609944] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 801.610234] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.610425] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.611453] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f94acc7-a719-4ddc-aa48-c7c6afceafd8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.619021] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b73ad210-a603-400c-bec7-a60485f8d449 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.625254] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 801.625520] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8902dcef-8e5e-4b20-bfdf-e9a22b7d0263 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.629747] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.629920] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 801.630664] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d6da892-65c2-463b-8fb6-39c7aa01a48a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.637990] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for the task: (returnval){ [ 801.637990] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524e71bb-e3cb-576b-ea5a-e1796ac16523" [ 801.637990] env[61573]: _type = "Task" [ 801.637990] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.650022] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524e71bb-e3cb-576b-ea5a-e1796ac16523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.710559] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 801.710559] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 801.710559] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Deleting the datastore file [datastore2] da95334f-ebbf-4a7f-8492-ca310028c4dd {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.710786] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35e8dd04-10d4-4b0a-a2f8-2d1b9dbd770f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.717334] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for the task: (returnval){ [ 801.717334] env[61573]: value = "task-4836766" [ 801.717334] env[61573]: _type = "Task" [ 801.717334] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.727414] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Task: {'id': task-4836766, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.148958] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 802.149261] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Creating directory with path [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.149512] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88cf3703-1660-4f2b-a34e-962622899a96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.164149] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Created directory with path [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.164402] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Fetch image to [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 802.164581] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 802.165420] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0771ad1b-32a3-40b7-8394-d246f8eba5b2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.176437] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d228aa0-cbf7-4ab2-809e-b283fb076f26 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.186898] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f63a38d-e5de-424a-bb94-3b7d8a5ff060 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.226904] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-72d0741b-e782-4075-8ae7-f082d115655e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.237637] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca5541cc-8bb4-4703-b0d2-814a03516f35 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.242263] env[61573]: DEBUG oslo_vmware.api [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Task: {'id': task-4836766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120294} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.242263] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.242263] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 802.242263] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 802.242263] env[61573]: INFO nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 802.244244] env[61573]: DEBUG nova.compute.claims [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 802.244244] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.244244] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.268584] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 802.354046] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 802.425186] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6bed70b-24d5-4654-a6d2-9c055867ccb6 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Acquiring lock "18ee5e56-b3d5-4152-a825-d2f814589d43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.425457] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6bed70b-24d5-4654-a6d2-9c055867ccb6 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Lock "18ee5e56-b3d5-4152-a825-d2f814589d43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.427751] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Completed reading data from the image iterator. 
{{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 802.428604] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 802.448625] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 802.471053] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 802.471053] env[61573]: DEBUG nova.compute.provider_tree [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.494553] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 802.524052] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 803.044297] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfb9243-0af8-47bf-b3d6-ba5d9f2ca39d {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.057493] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00ad097-8915-443b-869a-300112f48a48 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.096958] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76909df0-13cd-48d7-9203-ad982cd9e3d7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.106176] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3e313e-7069-4b03-8d25-1c7b5c742e20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.123095] env[61573]: DEBUG nova.compute.provider_tree [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.133250] env[61573]: DEBUG nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.154509] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.910s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.155463] env[61573]: ERROR nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 803.155463] env[61573]: Faults: ['InvalidArgument'] [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Traceback (most recent call last): [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self.driver.spawn(context, instance, image_meta, [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: 
da95334f-ebbf-4a7f-8492-ca310028c4dd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self._fetch_image_if_missing(context, vi) [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] image_cache(vi, tmp_image_ds_loc) [ 803.155463] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] vm_util.copy_virtual_disk( [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] session._wait_for_task(vmdk_copy_task) [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return self.wait_for_task(task_ref) [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return evt.wait() [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] result = hub.switch() [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] return self.greenlet.switch() [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 803.155982] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] self.f(*self.args, **self.kw) [ 803.156467] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 803.156467] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] raise exceptions.translate_fault(task_info.error) [ 803.156467] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] oslo_vmware.exceptions.VimFaultException: A specified 
parameter was not correct: fileType [ 803.156467] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Faults: ['InvalidArgument'] [ 803.156467] env[61573]: ERROR nova.compute.manager [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] [ 803.156467] env[61573]: DEBUG nova.compute.utils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 803.157986] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Build of instance da95334f-ebbf-4a7f-8492-ca310028c4dd was re-scheduled: A specified parameter was not correct: fileType [ 803.157986] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 803.158423] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 803.158646] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 803.158831] env[61573]: DEBUG nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 803.158997] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.098999] env[61573]: DEBUG nova.network.neutron [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.114830] env[61573]: INFO nova.compute.manager [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Took 0.96 seconds to deallocate network for instance. 
[ 804.302839] env[61573]: INFO nova.scheduler.client.report [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Deleted allocations for instance da95334f-ebbf-4a7f-8492-ca310028c4dd [ 804.337225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-453a20de-cb1d-4c88-b017-a850ab25d194 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 252.985s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.337225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 54.553s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.337225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.337225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.337563] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.339532] env[61573]: INFO nova.compute.manager [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Terminating instance [ 804.343494] env[61573]: DEBUG nova.compute.manager [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 804.344029] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 804.346700] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d71cf662-e7ca-414e-bad9-5fa9f73e7614 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.355391] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183f80a8-4e3f-42b9-bf99-80f8083c05cc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.367791] env[61573]: DEBUG nova.compute.manager [None req-d74865c9-332d-41a8-ae31-2b7e7832351e tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 549dbec8-1008-4930-8e2d-97cd11cfa45f] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.395940] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da95334f-ebbf-4a7f-8492-ca310028c4dd could not be found. [ 804.396181] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.396393] env[61573]: INFO nova.compute.manager [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 804.396615] env[61573]: DEBUG oslo.service.loopingcall [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.396897] env[61573]: DEBUG nova.compute.manager [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 804.396982] env[61573]: DEBUG nova.network.neutron [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.413287] env[61573]: DEBUG nova.compute.manager [None req-d74865c9-332d-41a8-ae31-2b7e7832351e tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 549dbec8-1008-4930-8e2d-97cd11cfa45f] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.440731] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d74865c9-332d-41a8-ae31-2b7e7832351e tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "549dbec8-1008-4930-8e2d-97cd11cfa45f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.530s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.461164] env[61573]: DEBUG nova.compute.manager [None req-19346093-52ce-472e-8389-2aa8ed3fd1b7 tempest-VolumesAssistedSnapshotsTest-685401197 tempest-VolumesAssistedSnapshotsTest-685401197-project-member] [instance: fe988112-9ee6-4fa7-8914-87119aaa2549] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.465879] env[61573]: DEBUG nova.network.neutron [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.488830] env[61573]: INFO nova.compute.manager [-] [instance: da95334f-ebbf-4a7f-8492-ca310028c4dd] Took 0.09 seconds to deallocate network for instance. [ 804.492332] env[61573]: DEBUG nova.compute.manager [None req-19346093-52ce-472e-8389-2aa8ed3fd1b7 tempest-VolumesAssistedSnapshotsTest-685401197 tempest-VolumesAssistedSnapshotsTest-685401197-project-member] [instance: fe988112-9ee6-4fa7-8914-87119aaa2549] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.529031] env[61573]: DEBUG oslo_concurrency.lockutils [None req-19346093-52ce-472e-8389-2aa8ed3fd1b7 tempest-VolumesAssistedSnapshotsTest-685401197 tempest-VolumesAssistedSnapshotsTest-685401197-project-member] Lock "fe988112-9ee6-4fa7-8914-87119aaa2549" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.491s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.540082] env[61573]: DEBUG nova.compute.manager [None req-c4e48610-55b2-48e1-925f-0c4704229484 tempest-ServersWithSpecificFlavorTestJSON-785244569 tempest-ServersWithSpecificFlavorTestJSON-785244569-project-member] [instance: 91def60e-ad74-435f-a006-eb5672928010] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.578545] env[61573]: DEBUG nova.compute.manager [None req-c4e48610-55b2-48e1-925f-0c4704229484 tempest-ServersWithSpecificFlavorTestJSON-785244569 tempest-ServersWithSpecificFlavorTestJSON-785244569-project-member] [instance: 91def60e-ad74-435f-a006-eb5672928010] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.613718] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c4e48610-55b2-48e1-925f-0c4704229484 tempest-ServersWithSpecificFlavorTestJSON-785244569 tempest-ServersWithSpecificFlavorTestJSON-785244569-project-member] Lock "91def60e-ad74-435f-a006-eb5672928010" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.474s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.645399] env[61573]: DEBUG nova.compute.manager [None req-792c39a7-9da0-4132-af21-92e184faab4f tempest-ServerAddressesNegativeTestJSON-2109027869 tempest-ServerAddressesNegativeTestJSON-2109027869-project-member] [instance: 643dbc96-7e32-4442-ab44-cbd74b0ffd72] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.690263] env[61573]: DEBUG nova.compute.manager [None req-792c39a7-9da0-4132-af21-92e184faab4f tempest-ServerAddressesNegativeTestJSON-2109027869 tempest-ServerAddressesNegativeTestJSON-2109027869-project-member] [instance: 643dbc96-7e32-4442-ab44-cbd74b0ffd72] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.691492] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a901e285-06d6-4ce7-bd49-ddf613f58adb tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "da95334f-ebbf-4a7f-8492-ca310028c4dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.356s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.722236] env[61573]: DEBUG oslo_concurrency.lockutils [None req-792c39a7-9da0-4132-af21-92e184faab4f tempest-ServerAddressesNegativeTestJSON-2109027869 tempest-ServerAddressesNegativeTestJSON-2109027869-project-member] Lock "643dbc96-7e32-4442-ab44-cbd74b0ffd72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.014s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.729329] env[61573]: DEBUG nova.compute.manager [None req-24abebe1-1b99-4d94-bf50-5e3952100ef5 tempest-ServersAdmin275Test-1369214867 tempest-ServersAdmin275Test-1369214867-project-member] [instance: f42d10b1-6733-4c39-9230-fd41dd82bb20] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.758225] env[61573]: DEBUG nova.compute.manager [None req-24abebe1-1b99-4d94-bf50-5e3952100ef5 tempest-ServersAdmin275Test-1369214867 tempest-ServersAdmin275Test-1369214867-project-member] [instance: f42d10b1-6733-4c39-9230-fd41dd82bb20] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.786616] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24abebe1-1b99-4d94-bf50-5e3952100ef5 tempest-ServersAdmin275Test-1369214867 tempest-ServersAdmin275Test-1369214867-project-member] Lock "f42d10b1-6733-4c39-9230-fd41dd82bb20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.296s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.798083] env[61573]: DEBUG nova.compute.manager [None req-fbeb75b7-ced4-4e0c-9bd4-a20ce96a050d tempest-ServerDiagnosticsV248Test-403520221 tempest-ServerDiagnosticsV248Test-403520221-project-member] [instance: fbd512f6-5c22-4a40-846c-0fece5ee7bcb] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.832128] env[61573]: DEBUG nova.compute.manager [None req-fbeb75b7-ced4-4e0c-9bd4-a20ce96a050d tempest-ServerDiagnosticsV248Test-403520221 tempest-ServerDiagnosticsV248Test-403520221-project-member] [instance: fbd512f6-5c22-4a40-846c-0fece5ee7bcb] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.861869] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fbeb75b7-ced4-4e0c-9bd4-a20ce96a050d tempest-ServerDiagnosticsV248Test-403520221 tempest-ServerDiagnosticsV248Test-403520221-project-member] Lock "fbd512f6-5c22-4a40-846c-0fece5ee7bcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.821s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.873546] env[61573]: DEBUG nova.compute.manager [None req-1e9c79b5-0651-4ca7-a7fe-02f52d99afe6 tempest-InstanceActionsNegativeTestJSON-465330128 tempest-InstanceActionsNegativeTestJSON-465330128-project-member] [instance: 37256e1b-8191-4bee-a3c3-dd31c99d9301] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.908306] env[61573]: DEBUG nova.compute.manager [None req-1e9c79b5-0651-4ca7-a7fe-02f52d99afe6 tempest-InstanceActionsNegativeTestJSON-465330128 tempest-InstanceActionsNegativeTestJSON-465330128-project-member] [instance: 37256e1b-8191-4bee-a3c3-dd31c99d9301] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 804.936816] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e9c79b5-0651-4ca7-a7fe-02f52d99afe6 tempest-InstanceActionsNegativeTestJSON-465330128 tempest-InstanceActionsNegativeTestJSON-465330128-project-member] Lock "37256e1b-8191-4bee-a3c3-dd31c99d9301" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.874s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.950355] env[61573]: DEBUG nova.compute.manager [None req-244ff0f2-ebc7-430a-8f38-7f56f8e2fb66 tempest-ServersTestFqdnHostnames-50427058 tempest-ServersTestFqdnHostnames-50427058-project-member] [instance: 3d0bfbba-2fec-4063-91f3-3152b3ea537e] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 804.981348] env[61573]: DEBUG nova.compute.manager [None req-244ff0f2-ebc7-430a-8f38-7f56f8e2fb66 tempest-ServersTestFqdnHostnames-50427058 tempest-ServersTestFqdnHostnames-50427058-project-member] [instance: 3d0bfbba-2fec-4063-91f3-3152b3ea537e] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 805.009993] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244ff0f2-ebc7-430a-8f38-7f56f8e2fb66 tempest-ServersTestFqdnHostnames-50427058 tempest-ServersTestFqdnHostnames-50427058-project-member] Lock "3d0bfbba-2fec-4063-91f3-3152b3ea537e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.928s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.025494] env[61573]: DEBUG nova.compute.manager [None req-a8c903a3-f440-48fb-8c3c-85e5459c1e36 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: a8b2d4b2-731f-489a-8b11-c816e6827189] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 805.061447] env[61573]: DEBUG nova.compute.manager [None req-a8c903a3-f440-48fb-8c3c-85e5459c1e36 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: a8b2d4b2-731f-489a-8b11-c816e6827189] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 805.101148] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8c903a3-f440-48fb-8c3c-85e5459c1e36 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "a8b2d4b2-731f-489a-8b11-c816e6827189" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.206s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.112538] env[61573]: DEBUG nova.compute.manager [None req-9951a5bd-3235-4023-8b41-796e782a1017 tempest-ServerMetadataTestJSON-759562833 tempest-ServerMetadataTestJSON-759562833-project-member] [instance: 65a29af2-4c3b-4556-ae95-adab1fa7aad4] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 805.138511] env[61573]: DEBUG nova.compute.manager [None req-9951a5bd-3235-4023-8b41-796e782a1017 tempest-ServerMetadataTestJSON-759562833 tempest-ServerMetadataTestJSON-759562833-project-member] [instance: 65a29af2-4c3b-4556-ae95-adab1fa7aad4] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 805.166378] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9951a5bd-3235-4023-8b41-796e782a1017 tempest-ServerMetadataTestJSON-759562833 tempest-ServerMetadataTestJSON-759562833-project-member] Lock "65a29af2-4c3b-4556-ae95-adab1fa7aad4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.655s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.179156] env[61573]: DEBUG nova.compute.manager [None req-98aa3695-9340-4318-b5fc-4eef6a9cc7e8 tempest-ServersNegativeTestJSON-472036444 tempest-ServersNegativeTestJSON-472036444-project-member] [instance: 0d55fe3b-3a7e-493d-b705-10241a2ac392] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 805.208057] env[61573]: DEBUG nova.compute.manager [None req-98aa3695-9340-4318-b5fc-4eef6a9cc7e8 tempest-ServersNegativeTestJSON-472036444 tempest-ServersNegativeTestJSON-472036444-project-member] [instance: 0d55fe3b-3a7e-493d-b705-10241a2ac392] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 805.241331] env[61573]: DEBUG oslo_concurrency.lockutils [None req-98aa3695-9340-4318-b5fc-4eef6a9cc7e8 tempest-ServersNegativeTestJSON-472036444 tempest-ServersNegativeTestJSON-472036444-project-member] Lock "0d55fe3b-3a7e-493d-b705-10241a2ac392" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.833s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.256188] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 805.345041] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.345041] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.345636] env[61573]: INFO nova.compute.claims [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.922024] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed64964-370b-4f88-8c9c-39efe692afc8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.932040] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09edd4ec-73ce-410c-a2a5-7791b285cf56 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.965461] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f33768-fec5-415d-aae2-5bbba511033b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.975778] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff0154d-137e-4003-a3cc-d87953df055c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.990199] env[61573]: DEBUG nova.compute.provider_tree [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.004105] env[61573]: DEBUG nova.scheduler.client.report [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.021590] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.676s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.021590] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 806.069664] env[61573]: DEBUG nova.compute.utils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.070416] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 806.070637] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 806.091422] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 806.205964] env[61573]: DEBUG nova.policy [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe89159c64fa44ab8ee9471df3f082a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '415cc2d7de384f7ca1b8c26e32974978', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 806.217292] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 806.223800] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3b1203b-9ac7-4cca-9b16-828c00c8a855 tempest-ServersTestManualDisk-2129984535 tempest-ServersTestManualDisk-2129984535-project-member] Acquiring lock "5be2b145-136b-4a1b-aafc-024180e9c398" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.224303] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3b1203b-9ac7-4cca-9b16-828c00c8a855 tempest-ServersTestManualDisk-2129984535 tempest-ServersTestManualDisk-2129984535-project-member] Lock "5be2b145-136b-4a1b-aafc-024180e9c398" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.254154] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 806.254565] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 806.254634] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.254817] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 806.254962] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.255132] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 
tempest-DeleteServersAdminTestJSON-303498553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 806.255343] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 806.255504] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 806.255963] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 806.255963] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 806.256272] env[61573]: DEBUG nova.virt.hardware [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 806.257425] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256d54d8-b648-49b7-92dd-94a3e56d9380 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.267702] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f7f73f-f37d-4dd3-804c-1a30fc908bdb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.663910] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d4a1a92f-92d6-4558-a68f-0cef537686e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "42a7b548-07fb-496d-b527-3a7528321a50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.664164] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d4a1a92f-92d6-4558-a68f-0cef537686e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "42a7b548-07fb-496d-b527-3a7528321a50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.002906] env[61573]: DEBUG nova.network.neutron [None 
req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Successfully created port: 8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.990803] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Successfully updated port: 8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.004544] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.004683] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.004835] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.072067] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.208894] env[61573]: DEBUG nova.compute.manager [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Received event network-vif-plugged-8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 808.209129] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] Acquiring lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.209330] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.209492] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.209650] env[61573]: DEBUG nova.compute.manager [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] No waiting events found dispatching network-vif-plugged-8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 808.209808] env[61573]: WARNING nova.compute.manager [req-a7679c73-7956-41da-95e9-414fd77fc01d req-74ff498f-3c77-47f2-a445-bfda36c7a37d service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Received unexpected event network-vif-plugged-8b05419c-e1c4-49d4-857e-3fff9f5c8250 for instance with vm_state building and task_state spawning. 
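The "Acquiring lock ... / Lock ... acquired ... :: waited / Lock ... released ... :: held" DEBUG lines that recur throughout this trace are emitted by oslo.concurrency's synchronized wrapper (the "inner" frames in lockutils.py referenced above). Below is a minimal, illustrative sketch of that pattern, assuming oslo.concurrency is installed; the lock name and function are stand-ins taken from this trace for readability, not Nova code, and the exact log wording may differ from the records above.

    import logging

    from oslo_concurrency import lockutils

    # Send DEBUG records to stderr so the acquire/release lines are visible.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('338e2879-7dbe-4334-80da-4bbc1a071aa8')
    def locked_do_build_and_run_instance():
        # Only one caller per lock name runs this body at a time. Concurrent
        # callers log "Acquiring lock ...", block until the holder returns,
        # and the wrapper reports how long they waited and how long the
        # previous holder held the lock on release.
        pass

    locked_do_build_and_run_instance()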
[ 808.339358] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Updating instance_info_cache with network_info: [{"id": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "address": "fa:16:3e:0b:ba:db", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05419c-e1", "ovs_interfaceid": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.355025] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.355217] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance network_info: |[{"id": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "address": "fa:16:3e:0b:ba:db", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05419c-e1", "ovs_interfaceid": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 808.355630] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 
tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:ba:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b05419c-e1c4-49d4-857e-3fff9f5c8250', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.363753] env[61573]: DEBUG oslo.service.loopingcall [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.364285] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 808.364528] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4eb134af-bb3b-4b79-b240-5a3e527d5013 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.386268] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.386268] env[61573]: value = "task-4836767" [ 808.386268] env[61573]: _type = "Task" [ 808.386268] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.395543] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836767, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.830250] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.898242] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836767, 'name': CreateVM_Task, 'duration_secs': 0.354869} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.898416] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 808.899211] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.899295] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.899605] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 808.899853] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebf76172-34d8-4350-a542-9ba0c6200dc6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.905251] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 808.905251] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524dd0c3-d7e8-3f46-08b5-f99b1a8f4e7e" [ 808.905251] env[61573]: _type = "Task" [ 808.905251] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.913595] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524dd0c3-d7e8-3f46-08b5-f99b1a8f4e7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.417888] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.418212] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.418440] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.627240] env[61573]: DEBUG nova.compute.manager [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Received event network-changed-8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 810.627497] env[61573]: DEBUG nova.compute.manager [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Refreshing instance network info cache due to event network-changed-8b05419c-e1c4-49d4-857e-3fff9f5c8250. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 810.628258] env[61573]: DEBUG oslo_concurrency.lockutils [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] Acquiring lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.628258] env[61573]: DEBUG oslo_concurrency.lockutils [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] Acquired lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.628258] env[61573]: DEBUG nova.network.neutron [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Refreshing network info cache for port 8b05419c-e1c4-49d4-857e-3fff9f5c8250 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 811.021365] env[61573]: DEBUG nova.network.neutron [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Updated VIF entry in instance network info cache for port 8b05419c-e1c4-49d4-857e-3fff9f5c8250. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 811.021365] env[61573]: DEBUG nova.network.neutron [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Updating instance_info_cache with network_info: [{"id": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "address": "fa:16:3e:0b:ba:db", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05419c-e1", "ovs_interfaceid": "8b05419c-e1c4-49d4-857e-3fff9f5c8250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.029789] env[61573]: DEBUG oslo_concurrency.lockutils [req-19438cda-d263-4a5f-b67d-ecbe0740a728 req-af959c4e-22d4-42e5-bf2e-b3f035c7d2fe service nova] Releasing lock "refresh_cache-338e2879-7dbe-4334-80da-4bbc1a071aa8" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.715471] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.715891] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.049928] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ccf7beb8-926d-4fbf-b897-eb35f310c7ab tempest-ServersTestJSON-566902269 tempest-ServersTestJSON-566902269-project-member] Acquiring lock "b2eba140-db65-403e-9abf-58a6737bf853" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.050346] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ccf7beb8-926d-4fbf-b897-eb35f310c7ab tempest-ServersTestJSON-566902269 tempest-ServersTestJSON-566902269-project-member] Lock "b2eba140-db65-403e-9abf-58a6737bf853" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.008104] env[61573]: DEBUG oslo_concurrency.lockutils [None req-564e56ff-7943-4e63-97d6-9518c35b681b tempest-ServerAddressesTestJSON-1771230949 tempest-ServerAddressesTestJSON-1771230949-project-member] Acquiring lock "b2654514-d2bb-4c48-b351-b449e2044ddc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.008104] env[61573]: DEBUG oslo_concurrency.lockutils [None req-564e56ff-7943-4e63-97d6-9518c35b681b tempest-ServerAddressesTestJSON-1771230949 tempest-ServerAddressesTestJSON-1771230949-project-member] Lock "b2654514-d2bb-4c48-b351-b449e2044ddc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.897654] env[61573]: DEBUG oslo_concurrency.lockutils [None req-13400ed2-7993-42c7-b817-76e7cd3c2d77 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "90be4957-04a8-40a1-a0ac-0cbaed2b1086" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.898070] env[61573]: DEBUG oslo_concurrency.lockutils [None req-13400ed2-7993-42c7-b817-76e7cd3c2d77 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "90be4957-04a8-40a1-a0ac-0cbaed2b1086" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.019636] env[61573]: DEBUG oslo_concurrency.lockutils [None req-182c5fc1-08f4-483a-b5e2-462c6b5a9987 tempest-ServerGroupTestJSON-1991982297 tempest-ServerGroupTestJSON-1991982297-project-member] Acquiring lock "0cecb88d-2c13-4171-9291-204d26979697" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.020023] env[61573]: DEBUG oslo_concurrency.lockutils [None req-182c5fc1-08f4-483a-b5e2-462c6b5a9987 tempest-ServerGroupTestJSON-1991982297 tempest-ServerGroupTestJSON-1991982297-project-member] Lock "0cecb88d-2c13-4171-9291-204d26979697" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.501678] env[61573]: DEBUG oslo_concurrency.lockutils [None req-61bb472e-865b-4052-a90c-98fab4030e91 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Acquiring lock "d5f7e49d-4444-4131-89c1-e4abdd9c1e49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.501678] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-61bb472e-865b-4052-a90c-98fab4030e91 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Lock "d5f7e49d-4444-4131-89c1-e4abdd9c1e49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.592258] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca4d8de4-5e94-4b8d-92b8-d3b5158b3701 tempest-ServerActionsTestJSON-2137638428 tempest-ServerActionsTestJSON-2137638428-project-member] Acquiring lock "f13f76f6-d764-4672-9a7d-37d687605348" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.592582] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca4d8de4-5e94-4b8d-92b8-d3b5158b3701 tempest-ServerActionsTestJSON-2137638428 tempest-ServerActionsTestJSON-2137638428-project-member] Lock "f13f76f6-d764-4672-9a7d-37d687605348" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.529213] env[61573]: WARNING oslo_vmware.rw_handles [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 851.529213] env[61573]: ERROR oslo_vmware.rw_handles [ 851.529901] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 851.532035] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 
tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 851.532035] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Copying Virtual Disk [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/3a37567e-c5d5-48c9-b999-85f42b1c4824/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 851.532035] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c47b86e1-7e90-4ec9-9450-9346de54b4f1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.541484] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for the task: (returnval){ [ 851.541484] env[61573]: value = "task-4836768" [ 851.541484] env[61573]: _type = "Task" [ 851.541484] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.549881] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Task: {'id': task-4836768, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.053111] env[61573]: DEBUG oslo_vmware.exceptions [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 852.053300] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.053752] env[61573]: ERROR nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.053752] env[61573]: Faults: ['InvalidArgument'] [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Traceback (most recent call last): [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] yield resources [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self.driver.spawn(context, instance, image_meta, [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self._fetch_image_if_missing(context, vi) [ 852.053752] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] image_cache(vi, tmp_image_ds_loc) [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] vm_util.copy_virtual_disk( [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] session._wait_for_task(vmdk_copy_task) [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return self.wait_for_task(task_ref) [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return evt.wait() [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] result = hub.switch() [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 852.054201] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return self.greenlet.switch() [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self.f(*self.args, **self.kw) [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] raise exceptions.translate_fault(task_info.error) [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Faults: ['InvalidArgument'] [ 852.055534] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] [ 852.055534] env[61573]: INFO nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Terminating instance [ 852.055929] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.055929] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.056506] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 
9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 852.056735] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 852.056930] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1feb9d4a-df5f-4034-b230-321db950aff9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.059293] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a106cf-49c0-4e52-b567-3ce7237d4069 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.066830] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 852.067101] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac635df1-989a-43a9-88e6-d04a35ae6167 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.069447] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.069623] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 852.070661] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c2e591b-441a-4574-bcb8-6d460674c444 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.075612] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 852.075612] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52451756-7003-79e0-50fd-477df33b609f" [ 852.075612] env[61573]: _type = "Task" [ 852.075612] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.083664] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52451756-7003-79e0-50fd-477df33b609f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.140989] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 852.141399] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 852.141704] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Deleting the datastore file [datastore2] 9cb4ec1b-e422-491d-8ee9-0103c740ea5b {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.142115] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f5f4963-84cf-4ba7-8711-3cc1397a5b81 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.149286] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for the task: (returnval){ [ 852.149286] env[61573]: value = "task-4836770" [ 852.149286] env[61573]: _type = "Task" [ 852.149286] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.158266] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Task: {'id': task-4836770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.587312] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 852.587670] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating directory with path [datastore2] vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.587893] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2db052fb-ca77-4bc2-9a3e-633903bba33d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.604026] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created directory with path [datastore2] vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.604026] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Fetch image to [datastore2] vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 852.604026] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 852.604026] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1335d7d5-3f02-4613-acfe-31ae095d3d8a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.610240] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6983c721-6307-431f-ae38-cd1c1001f8a2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.620991] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baf56ce-ac1b-4467-9f64-023ddfe3a83d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.661433] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720b5e3e-ac05-475c-87b8-96e9d963d401 {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.672068] env[61573]: DEBUG oslo_vmware.api [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Task: {'id': task-4836770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084941} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.673774] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.674103] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 852.674329] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 852.674652] env[61573]: INFO nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Took 0.62 seconds to destroy the instance on the hypervisor. 
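The entries above record Nova waiting on a VMware DeleteDatastoreFile_Task, polling its progress until the task reports completion (progress is 0% ... completed successfully, duration_secs 0.084941), the same wait_for_task/_poll_task pattern that raised the fileType fault earlier in the trace. The following is a minimal, self-contained sketch of such a poll-until-done loop for illustration only; fetch_task_info and TaskFailed are hypothetical placeholders, not oslo_vmware APIs.

import time

class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""

def wait_for_task(task_id, fetch_task_info, interval=0.5, timeout=300):
    """Poll a remote task until it succeeds, fails, or times out.

    fetch_task_info(task_id) is assumed to return a dict with keys
    'state' ('running' | 'success' | 'error'), 'progress', and 'error'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            # Mirrors the translated-fault raise seen in the traceback above.
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")

# Example usage with a fake backend that completes on the second poll:
_calls = {"n": 0}

def _fake_fetch(task_id):
    _calls["n"] += 1
    if _calls["n"] < 2:
        return {"state": "running", "progress": 0}
    return {"state": "success", "progress": 100}

print(wait_for_task("task-4836770", _fake_fetch, interval=0.01))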
[ 852.676778] env[61573]: DEBUG nova.compute.claims [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 852.676983] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.677250] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.679937] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-17b8c719-68e2-4952-8cc0-33f646b82127 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.704816] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 852.822687] env[61573]: DEBUG oslo_vmware.rw_handles [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 852.884220] env[61573]: DEBUG oslo_vmware.rw_handles [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 852.884220] env[61573]: DEBUG oslo_vmware.rw_handles [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 853.166738] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10e5554-289c-4eb3-b8d3-786f5cee0e2f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.174947] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849dbb33-07ac-4ee8-829e-8151b4b9b3c5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.212339] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6fba49-73b9-4d60-adbf-1c009a430831 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.220165] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43742314-fd6f-4d52-925a-8b5fb9eb1a93 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.235620] env[61573]: DEBUG nova.compute.provider_tree [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.247840] env[61573]: DEBUG nova.scheduler.client.report [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.264553] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.587s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.265131] env[61573]: ERROR nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 853.265131] env[61573]: Faults: ['InvalidArgument'] [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Traceback (most recent call last): [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 853.265131] env[61573]: 
ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self.driver.spawn(context, instance, image_meta, [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self._fetch_image_if_missing(context, vi) [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] image_cache(vi, tmp_image_ds_loc) [ 853.265131] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] vm_util.copy_virtual_disk( [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] session._wait_for_task(vmdk_copy_task) [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return self.wait_for_task(task_ref) [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return evt.wait() [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] result = hub.switch() [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] return self.greenlet.switch() [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 853.265497] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] self.f(*self.args, **self.kw) [ 853.265853] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 853.265853] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] raise exceptions.translate_fault(task_info.error) [ 853.265853] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 853.265853] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Faults: ['InvalidArgument'] [ 853.265853] env[61573]: ERROR nova.compute.manager [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] [ 853.265853] env[61573]: DEBUG nova.compute.utils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 853.267393] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Build of instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b was re-scheduled: A specified parameter was not correct: fileType [ 853.267393] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 853.267766] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 853.267937] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 853.268164] env[61573]: DEBUG nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 853.268344] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 853.653356] env[61573]: DEBUG nova.network.neutron [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.667727] env[61573]: INFO nova.compute.manager [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Took 0.40 seconds to deallocate network for instance. [ 853.779767] env[61573]: INFO nova.scheduler.client.report [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Deleted allocations for instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b [ 853.803785] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9f9957e6-ab0f-456d-a68d-be1a6b16376c tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 293.018s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.805060] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 92.720s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.805238] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Acquiring lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.805447] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.805610] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.809405] env[61573]: INFO nova.compute.manager [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Terminating instance [ 853.809405] env[61573]: DEBUG nova.compute.manager [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 853.809818] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.810697] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0dbcadc3-d328-445f-8764-eb54a52c8d28 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.820666] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b923b1-7387-4bc7-b55a-da1c329a44d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.832637] env[61573]: DEBUG nova.compute.manager [None req-76893cfa-de01-4f69-8c6c-bbf996e6f4bc tempest-ServerActionsTestOtherA-1332715427 tempest-ServerActionsTestOtherA-1332715427-project-member] [instance: 79b5dc73-2168-46b5-8045-13e92666a69a] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 853.856243] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cb4ec1b-e422-491d-8ee9-0103c740ea5b could not be found. 
[ 853.856451] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 853.856629] env[61573]: INFO nova.compute.manager [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 853.856877] env[61573]: DEBUG oslo.service.loopingcall [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.857118] env[61573]: DEBUG nova.compute.manager [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 853.857801] env[61573]: DEBUG nova.network.neutron [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 853.860156] env[61573]: DEBUG nova.compute.manager [None req-76893cfa-de01-4f69-8c6c-bbf996e6f4bc tempest-ServerActionsTestOtherA-1332715427 tempest-ServerActionsTestOtherA-1332715427-project-member] [instance: 79b5dc73-2168-46b5-8045-13e92666a69a] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 853.882030] env[61573]: DEBUG oslo_concurrency.lockutils [None req-76893cfa-de01-4f69-8c6c-bbf996e6f4bc tempest-ServerActionsTestOtherA-1332715427 tempest-ServerActionsTestOtherA-1332715427-project-member] Lock "79b5dc73-2168-46b5-8045-13e92666a69a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.198s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.893026] env[61573]: DEBUG nova.compute.manager [None req-43525257-a454-418c-8355-f17dade7cf69 tempest-ServerExternalEventsTest-1001139505 tempest-ServerExternalEventsTest-1001139505-project-member] [instance: 413e21a0-5b33-44d1-9964-82e3da294808] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 853.897253] env[61573]: DEBUG nova.network.neutron [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.904668] env[61573]: INFO nova.compute.manager [-] [instance: 9cb4ec1b-e422-491d-8ee9-0103c740ea5b] Took 0.05 seconds to deallocate network for instance. [ 853.924721] env[61573]: DEBUG nova.compute.manager [None req-43525257-a454-418c-8355-f17dade7cf69 tempest-ServerExternalEventsTest-1001139505 tempest-ServerExternalEventsTest-1001139505-project-member] [instance: 413e21a0-5b33-44d1-9964-82e3da294808] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 853.949172] env[61573]: DEBUG oslo_concurrency.lockutils [None req-43525257-a454-418c-8355-f17dade7cf69 tempest-ServerExternalEventsTest-1001139505 tempest-ServerExternalEventsTest-1001139505-project-member] Lock "413e21a0-5b33-44d1-9964-82e3da294808" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.164s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.964795] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 02a2cbdf-c522-4ba5-9914-ccf1c51e0130] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 853.990433] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 02a2cbdf-c522-4ba5-9914-ccf1c51e0130] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.018542] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7bc72e94-c90b-4947-9e3c-1ef9dc087c41 tempest-AttachInterfacesV270Test-1347334067 tempest-AttachInterfacesV270Test-1347334067-project-member] Lock "9cb4ec1b-e422-491d-8ee9-0103c740ea5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.214s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.033126] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "02a2cbdf-c522-4ba5-9914-ccf1c51e0130" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.405s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.043697] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 1e843567-f13d-40fa-94a0-931655307053] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.070199] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 1e843567-f13d-40fa-94a0-931655307053] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.094178] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "1e843567-f13d-40fa-94a0-931655307053" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.437s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.103879] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 97a1e2dc-9c4c-419d-87aa-839f21dc6e23] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.131550] env[61573]: DEBUG nova.compute.manager [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] [instance: 97a1e2dc-9c4c-419d-87aa-839f21dc6e23] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.153495] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4c4ec326-20c2-4690-b9c2-8475dab58529 tempest-ListServersNegativeTestJSON-1481680282 tempest-ListServersNegativeTestJSON-1481680282-project-member] Lock "97a1e2dc-9c4c-419d-87aa-839f21dc6e23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.466s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.163468] env[61573]: DEBUG nova.compute.manager [None req-94a577dd-a200-4671-9ce6-92aa0a85fb65 tempest-ServersV294TestFqdnHostnames-1409359785 tempest-ServersV294TestFqdnHostnames-1409359785-project-member] [instance: 12a02fd4-ae5d-4e7f-96e1-27d50d54bc24] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.193024] env[61573]: DEBUG nova.compute.manager [None req-94a577dd-a200-4671-9ce6-92aa0a85fb65 tempest-ServersV294TestFqdnHostnames-1409359785 tempest-ServersV294TestFqdnHostnames-1409359785-project-member] [instance: 12a02fd4-ae5d-4e7f-96e1-27d50d54bc24] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.214239] env[61573]: DEBUG oslo_concurrency.lockutils [None req-94a577dd-a200-4671-9ce6-92aa0a85fb65 tempest-ServersV294TestFqdnHostnames-1409359785 tempest-ServersV294TestFqdnHostnames-1409359785-project-member] Lock "12a02fd4-ae5d-4e7f-96e1-27d50d54bc24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.839s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.238890] env[61573]: DEBUG nova.compute.manager [None req-9e9c040e-cd91-4689-b976-22089c6b3210 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] [instance: 3fb9b32d-193f-4968-8783-54d25253f94f] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.267976] env[61573]: DEBUG nova.compute.manager [None req-9e9c040e-cd91-4689-b976-22089c6b3210 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] [instance: 3fb9b32d-193f-4968-8783-54d25253f94f] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.295852] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9e9c040e-cd91-4689-b976-22089c6b3210 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Lock "3fb9b32d-193f-4968-8783-54d25253f94f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.582s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.305672] env[61573]: DEBUG nova.compute.manager [None req-310ecc1e-fdf1-44b0-9ab2-c1e6da2f250b tempest-ServerActionsV293TestJSON-1012733847 tempest-ServerActionsV293TestJSON-1012733847-project-member] [instance: 12cd88c0-09c1-42de-8fdd-03139718ca17] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.332784] env[61573]: DEBUG nova.compute.manager [None req-310ecc1e-fdf1-44b0-9ab2-c1e6da2f250b tempest-ServerActionsV293TestJSON-1012733847 tempest-ServerActionsV293TestJSON-1012733847-project-member] [instance: 12cd88c0-09c1-42de-8fdd-03139718ca17] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 854.364601] env[61573]: DEBUG oslo_concurrency.lockutils [None req-310ecc1e-fdf1-44b0-9ab2-c1e6da2f250b tempest-ServerActionsV293TestJSON-1012733847 tempest-ServerActionsV293TestJSON-1012733847-project-member] Lock "12cd88c0-09c1-42de-8fdd-03139718ca17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.024s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.380030] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 854.439733] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.440014] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.442030] env[61573]: INFO nova.compute.claims [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.925918] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f9432d-2d96-4d02-b626-41bb15649e90 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.934074] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1adbba-b54f-449d-b607-175792e53b9a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.963999] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db645fd7-bbe0-4376-b264-1df885ee2072 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.972315] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c677a6a-a535-4c0d-a087-929205a47218 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.985979] env[61573]: DEBUG nova.compute.provider_tree [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.997865] env[61573]: DEBUG nova.scheduler.client.report [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.016460] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.576s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.016772] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 855.064666] env[61573]: DEBUG nova.compute.utils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.066239] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 855.067062] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 855.083449] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 855.174979] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 855.211019] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.211019] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.211019] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.211252] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.211377] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.211657] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.212075] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.212361] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 855.212636] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.212900] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.213216] env[61573]: DEBUG nova.virt.hardware [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.214751] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b800ff1-d524-4996-a957-02747f22a79d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.224816] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0855808f-b8dc-4494-b93e-b34809b19db7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.394356] env[61573]: DEBUG nova.policy [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47f43ce2fa4a4c8190358ef608c7e52a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de5ad5b083124bcab1da2e5a2ae152c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.010487] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Successfully created port: 952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.404956] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.405169] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 857.088135] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Successfully updated port: 952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.111581] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.111850] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.112768] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.165266] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.186747] env[61573]: DEBUG nova.compute.manager [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Received event network-vif-plugged-952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 857.186976] env[61573]: DEBUG oslo_concurrency.lockutils [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] Acquiring lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.187198] env[61573]: DEBUG oslo_concurrency.lockutils [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.187365] env[61573]: DEBUG oslo_concurrency.lockutils [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.187538] env[61573]: DEBUG nova.compute.manager [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] No waiting events found dispatching network-vif-plugged-952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 857.187698] env[61573]: WARNING nova.compute.manager [req-afcd51b9-5b18-4acc-8971-046824296bd8 req-b3053c4c-100c-447b-adc2-0bf3264549a8 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Received unexpected event network-vif-plugged-952fbacf-5fed-4b0a-af30-02fd754923ae for instance with vm_state building and task_state spawning. 
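The entries above show the external-event handshake around port 952fbacf-5fed-4b0a-af30-02fd754923ae: Neutron reports network-vif-plugged, Nova pops a matching waiter if one was registered, and otherwise logs the event as unexpected because the instance is still building. A simplified sketch of such a waiter registry follows, for illustration only; prepare_for_event and receive_event are hypothetical names, not Nova's implementation.

import threading

_waiters = {}
_guard = threading.Lock()

def prepare_for_event(tag):
    """Register a waiter for an external event before starting the operation."""
    ev = threading.Event()
    with _guard:
        _waiters[tag] = ev
    return ev

def receive_event(tag):
    """Called when the external service reports the event."""
    with _guard:
        ev = _waiters.pop(tag, None)
    if ev is None:
        print(f"Received unexpected event {tag}")
    else:
        ev.set()

# Usage: the spawn path waits with a timeout so a lost notification
# cannot block the build forever.
tag = "network-vif-plugged-952fbacf-5fed-4b0a-af30-02fd754923ae"
ev = prepare_for_event(tag)
receive_event(tag)
assert ev.wait(timeout=300)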
[ 857.400131] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.700044] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Updating instance_info_cache with network_info: [{"id": "952fbacf-5fed-4b0a-af30-02fd754923ae", "address": "fa:16:3e:fd:0d:62", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap952fbacf-5f", "ovs_interfaceid": "952fbacf-5fed-4b0a-af30-02fd754923ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.724035] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.724035] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance network_info: |[{"id": "952fbacf-5fed-4b0a-af30-02fd754923ae", "address": "fa:16:3e:fd:0d:62", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap952fbacf-5f", "ovs_interfaceid": 
"952fbacf-5fed-4b0a-af30-02fd754923ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 857.724240] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:0d:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '952fbacf-5fed-4b0a-af30-02fd754923ae', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.730872] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating folder: Project (de5ad5b083124bcab1da2e5a2ae152c9). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 857.731497] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17fceec5-e8d4-4117-8671-2d3b35b74f83 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.744642] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created folder: Project (de5ad5b083124bcab1da2e5a2ae152c9) in parent group-v942801. [ 857.744897] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating folder: Instances. Parent ref: group-v942845. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 857.745216] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab6496e0-a93b-45d5-a9c2-bfa507c0f9f3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.756669] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created folder: Instances in parent group-v942845. [ 857.757037] env[61573]: DEBUG oslo.service.loopingcall [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.757261] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 857.757481] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c889ddd-a295-4cc9-9651-342ede598265 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.782637] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.782637] env[61573]: value = "task-4836773" [ 857.782637] env[61573]: _type = "Task" [ 857.782637] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.792707] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836773, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.292753] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836773, 'name': CreateVM_Task, 'duration_secs': 0.312795} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.293109] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 858.293655] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.293824] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.294164] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.294417] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94c917b-904f-4ce2-aa37-a1159ce112f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.299471] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 858.299471] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5237147e-8a46-3f67-cb70-4fcace3b0187" [ 858.299471] env[61573]: _type = "Task" [ 858.299471] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.307576] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5237147e-8a46-3f67-cb70-4fcace3b0187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.403330] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.812475] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.812851] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.812965] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.259203] env[61573]: DEBUG nova.compute.manager [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Received event network-changed-952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 859.259402] env[61573]: DEBUG nova.compute.manager [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Refreshing instance network info cache due to event network-changed-952fbacf-5fed-4b0a-af30-02fd754923ae. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 859.259689] env[61573]: DEBUG oslo_concurrency.lockutils [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] Acquiring lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.259741] env[61573]: DEBUG oslo_concurrency.lockutils [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] Acquired lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.259909] env[61573]: DEBUG nova.network.neutron [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Refreshing network info cache for port 952fbacf-5fed-4b0a-af30-02fd754923ae {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 859.852938] env[61573]: DEBUG nova.network.neutron [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Updated VIF entry in instance network info cache for port 952fbacf-5fed-4b0a-af30-02fd754923ae. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 859.853516] env[61573]: DEBUG nova.network.neutron [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Updating instance_info_cache with network_info: [{"id": "952fbacf-5fed-4b0a-af30-02fd754923ae", "address": "fa:16:3e:fd:0d:62", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap952fbacf-5f", "ovs_interfaceid": "952fbacf-5fed-4b0a-af30-02fd754923ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.863645] env[61573]: DEBUG oslo_concurrency.lockutils [req-66b6c19f-3427-4e0f-a7fe-290f9a05cc66 req-8974ea07-77c3-4a62-8959-cb50dd8b2a49 service nova] Releasing lock "refresh_cache-0d91230d-849f-4e64-8685-5298ee5ea5b1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.403625] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.403843] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.403987] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.416154] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.416381] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.416548] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.416698] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 860.417889] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ed1ed3-e91f-43ad-beb8-151fbd9a4627 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.427291] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a836c5-ea65-428a-8369-770f04de6286 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.441920] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdd7c7b-8f88-490f-9731-8b39b2cb009f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.449410] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca6d540-0c75-47f1-bff2-144f426133d7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.479510] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180562MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 860.479672] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.479874] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.564221] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564221] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564415] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564415] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564590] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564739] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.564889] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.565047] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.565192] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.565314] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.577521] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.589438] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 46dd382d-f6d0-4bd5-b027-c07d01a9a595 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.600891] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 05122510-519e-43f3-96bf-51801559a5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.612517] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.623194] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79a02472-a364-42f6-9fe0-c030df8436b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.633395] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 80e12c49-e98a-4ece-a080-783f99fccabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.645024] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 57358b9b-2bf1-47d9-a28c-7d45770604d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.655873] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.667103] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5e637ba3-4faf-45f0-9454-dc38d14756c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.678437] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 18ee5e56-b3d5-4152-a825-d2f814589d43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.689558] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5be2b145-136b-4a1b-aafc-024180e9c398 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.706489] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 42a7b548-07fb-496d-b527-3a7528321a50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.717711] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.729092] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2eba140-db65-403e-9abf-58a6737bf853 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.739664] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2654514-d2bb-4c48-b351-b449e2044ddc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.751115] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 90be4957-04a8-40a1-a0ac-0cbaed2b1086 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.762644] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0cecb88d-2c13-4171-9291-204d26979697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.773822] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d5f7e49d-4444-4131-89c1-e4abdd9c1e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.790452] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f13f76f6-d764-4672-9a7d-37d687605348 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.790715] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 860.790934] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '25', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '3', 'num_os_type_None': '10', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '2', 'io_workload': '10', 'num_task_deleting': '7', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 861.200211] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21b4668-2250-4df3-ba52-6c01d339a6e2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.208578] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf06b5c-592f-429d-94ff-00c6b47a4c2a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.240117] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dda675-2d70-4f43-a5de-80c2e9c486d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.248443] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6c0762-d0d0-4d95-adfc-59c72e7d05f5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.262432] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.271273] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 861.286889] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 861.286889] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.807s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.282155] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.282476] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.403862] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.404090] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 862.404218] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 862.426978] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.427212] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.427370] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.427598] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.427792] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.427952] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.428138] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.428296] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.428444] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.428587] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 862.428763] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 862.429341] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.809834] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.545048] env[61573]: WARNING oslo_vmware.rw_handles [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 901.545048] env[61573]: ERROR oslo_vmware.rw_handles [ 901.545761] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 901.548238] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 901.548589] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/6ed06398-f3f3-45e4-bfc1-9715b4f0f7e8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 901.548969] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97f45aa6-325a-4998-bcab-a91ca3a6b31e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.558994] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 901.558994] env[61573]: value = "task-4836774" [ 901.558994] env[61573]: _type = "Task" [ 901.558994] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.568374] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.069856] env[61573]: DEBUG oslo_vmware.exceptions [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 902.069856] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.070425] env[61573]: ERROR nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.070425] env[61573]: Faults: ['InvalidArgument'] [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Traceback (most recent call last): [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] yield resources [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self.driver.spawn(context, instance, image_meta, [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self._fetch_image_if_missing(context, vi) [ 902.070425] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] image_cache(vi, tmp_image_ds_loc) [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] vm_util.copy_virtual_disk( [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] session._wait_for_task(vmdk_copy_task) [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return self.wait_for_task(task_ref) [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return evt.wait() [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] result = hub.switch() [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 902.070810] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return self.greenlet.switch() [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self.f(*self.args, **self.kw) [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] raise exceptions.translate_fault(task_info.error) [ 902.071297] 
env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Faults: ['InvalidArgument'] [ 902.071297] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] [ 902.071297] env[61573]: INFO nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Terminating instance [ 902.072506] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.072708] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.072939] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be29f004-5d00-44c4-8d05-f532b61a19dc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.076351] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 902.076479] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 902.077249] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f95972f-4d46-4389-a0fe-c695160dd0a0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.082232] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.082232] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 902.084027] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc0d41a-e52e-4e73-b4a9-670dc570f237 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.087028] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 902.087522] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc8e0941-99d1-4b4f-8ef2-c37c2eb22ad6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.089830] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 902.089830] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c8df4e-9a91-0406-d199-2a40364c6e1f" [ 902.089830] env[61573]: _type = "Task" [ 902.089830] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.098512] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c8df4e-9a91-0406-d199-2a40364c6e1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.163031] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 902.163433] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 902.163688] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleting the datastore file [datastore2] 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.163986] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dbe4e7c-4309-4407-85f1-f65c483fe6f2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.171024] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 902.171024] env[61573]: value = "task-4836776" [ 902.171024] env[61573]: _type = "Task" [ 902.171024] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.179777] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.601098] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 902.601465] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating directory with path [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.601574] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0974b15-ad51-4be8-946d-6d74fd9cb800 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.613335] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Created directory with path [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.613530] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Fetch image to [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 902.613699] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 902.614474] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f1b00-7fd4-47af-b079-9b41ec29ab9b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.621296] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf9852e-32d3-460e-bd66-35c4264b23ad {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.630565] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1afeca-6715-4d4b-9229-656786c7e0f0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.660805] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04226a62-fef8-4eab-909b-ccf47da0de07 {{(pid=61573) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.667426] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-294b1f5c-bca1-448c-8bce-c0073ef61ca6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.679092] env[61573]: DEBUG oslo_vmware.api [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08324} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.679349] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.679528] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 902.679699] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 902.679873] env[61573]: INFO nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 902.682253] env[61573]: DEBUG nova.compute.claims [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 902.682525] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.682814] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.692229] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 902.746980] env[61573]: DEBUG oslo_vmware.rw_handles [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 902.812897] env[61573]: DEBUG oslo_vmware.rw_handles [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 902.813201] env[61573]: DEBUG oslo_vmware.rw_handles [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 903.178615] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b6f6c4-0417-429b-993d-4a77cc069e40 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.188033] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d9fc5d-90fe-4b00-a9af-5e313964a3ae {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.218213] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb8aa8a-9c0e-467c-85f7-3887c6ea3847 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.226865] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ded4185-2330-4b57-850d-32a29b6da473 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.240031] env[61573]: DEBUG nova.compute.provider_tree [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.249435] env[61573]: DEBUG nova.scheduler.client.report [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.264433] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.582s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.264972] env[61573]: ERROR nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.264972] env[61573]: Faults: ['InvalidArgument'] [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Traceback (most recent call last): [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 903.264972] env[61573]: ERROR nova.compute.manager 
[instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self.driver.spawn(context, instance, image_meta, [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self._fetch_image_if_missing(context, vi) [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] image_cache(vi, tmp_image_ds_loc) [ 903.264972] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] vm_util.copy_virtual_disk( [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] session._wait_for_task(vmdk_copy_task) [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return self.wait_for_task(task_ref) [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return evt.wait() [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] result = hub.switch() [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] return self.greenlet.switch() [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 903.265399] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] self.f(*self.args, **self.kw) [ 903.265819] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 903.265819] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] raise exceptions.translate_fault(task_info.error) [ 903.265819] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.265819] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Faults: ['InvalidArgument'] [ 903.265819] env[61573]: ERROR nova.compute.manager [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] [ 903.265819] env[61573]: DEBUG nova.compute.utils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 903.267178] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Build of instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 was re-scheduled: A specified parameter was not correct: fileType [ 903.267178] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 903.267543] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 903.267715] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 903.267884] env[61573]: DEBUG nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 903.268063] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 903.642164] env[61573]: DEBUG nova.network.neutron [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.653835] env[61573]: INFO nova.compute.manager [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3] Took 0.38 seconds to deallocate network for instance. [ 903.757833] env[61573]: INFO nova.scheduler.client.report [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleted allocations for instance 35ffeaee-b94d-482b-a053-f6cb9d2a7bc3 [ 903.778566] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f99e5419-8372-4501-a335-173dece750dc tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "35ffeaee-b94d-482b-a053-f6cb9d2a7bc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 342.703s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.794338] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 903.848022] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.848022] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.851044] env[61573]: INFO nova.compute.claims [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.256830] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed60a872-42f4-4529-8d65-504e2b40af47 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.265572] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6969aea6-f51b-438d-b9d2-2988ba56fdc4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.295410] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73af4e0-9175-434a-b536-383ef4711f99 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.303814] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79903816-4911-449b-9f33-3f4263600959 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.319099] env[61573]: DEBUG nova.compute.provider_tree [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.328564] env[61573]: DEBUG nova.scheduler.client.report [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.344323] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.496s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.344831] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 904.394838] env[61573]: DEBUG nova.compute.utils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 904.396127] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 904.396287] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 904.412797] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 904.482891] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 904.486321] env[61573]: DEBUG nova.policy [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d4ed6e034d44273a869cedd3675e1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27a816079ac04dabaf85dec4005df607', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 904.510021] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 904.510021] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 904.510021] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 904.510254] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 904.510254] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 904.510254] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 904.510452] env[61573]: DEBUG 
nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 904.510603] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 904.510787] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 904.510924] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 904.511159] env[61573]: DEBUG nova.virt.hardware [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 904.512014] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0662811b-f8c2-4fc9-b1ba-747f2a1d0e49 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.520712] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91eb727a-b554-4fda-b618-a0ff00e08e41 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.222038] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Successfully created port: c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.918587] env[61573]: DEBUG nova.compute.manager [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 req-476e547a-b547-48d6-aa83-418867850d01 service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Received event network-vif-plugged-c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 905.918814] env[61573]: DEBUG oslo_concurrency.lockutils [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 req-476e547a-b547-48d6-aa83-418867850d01 service nova] Acquiring lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.919626] env[61573]: DEBUG oslo_concurrency.lockutils [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 
req-476e547a-b547-48d6-aa83-418867850d01 service nova] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.919905] env[61573]: DEBUG oslo_concurrency.lockutils [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 req-476e547a-b547-48d6-aa83-418867850d01 service nova] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.920104] env[61573]: DEBUG nova.compute.manager [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 req-476e547a-b547-48d6-aa83-418867850d01 service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] No waiting events found dispatching network-vif-plugged-c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 905.920283] env[61573]: WARNING nova.compute.manager [req-75b8cdf0-d52d-491f-a2f9-65c925b8a622 req-476e547a-b547-48d6-aa83-418867850d01 service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Received unexpected event network-vif-plugged-c5565bed-5098-4620-87d4-51776d233061 for instance with vm_state building and task_state spawning. [ 906.074733] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Successfully updated port: c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.086896] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.087042] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquired lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.087199] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.149634] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.407857] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Updating instance_info_cache with network_info: [{"id": "c5565bed-5098-4620-87d4-51776d233061", "address": "fa:16:3e:fb:05:9b", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5565bed-50", "ovs_interfaceid": "c5565bed-5098-4620-87d4-51776d233061", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.426187] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Releasing lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.426187] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance network_info: |[{"id": "c5565bed-5098-4620-87d4-51776d233061", "address": "fa:16:3e:fb:05:9b", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5565bed-50", "ovs_interfaceid": "c5565bed-5098-4620-87d4-51776d233061", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 906.426474] env[61573]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:05:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5565bed-5098-4620-87d4-51776d233061', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.433066] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Creating folder: Project (27a816079ac04dabaf85dec4005df607). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 906.433661] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-549f81d9-23ab-4fac-8f9e-4c922aa29d3d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.445872] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Created folder: Project (27a816079ac04dabaf85dec4005df607) in parent group-v942801. [ 906.448018] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Creating folder: Instances. Parent ref: group-v942848. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 906.448018] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ac6eaea-9431-4c90-aaa2-5c5b2c01f63c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.458528] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Created folder: Instances in parent group-v942848. [ 906.458528] env[61573]: DEBUG oslo.service.loopingcall [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.458528] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 906.458528] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43c14138-c892-405a-8d3d-a9f3fbd1dbd8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.479510] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.479510] env[61573]: value = "task-4836779" [ 906.479510] env[61573]: _type = "Task" [ 906.479510] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.488174] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836779, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.990085] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836779, 'name': CreateVM_Task, 'duration_secs': 0.381496} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.990085] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 906.990085] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.990295] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.990517] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.990764] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb511ac-173a-4f80-893a-2c3419a5ae72 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.995661] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for the task: (returnval){ [ 906.995661] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52dd229f-919a-51c2-55e5-0ce2784305ff" [ 906.995661] env[61573]: _type = "Task" [ 906.995661] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.003560] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52dd229f-919a-51c2-55e5-0ce2784305ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.506503] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.506965] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.506965] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.944432] env[61573]: DEBUG nova.compute.manager [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Received event network-changed-c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 907.944631] env[61573]: DEBUG nova.compute.manager [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Refreshing instance network info cache due to event network-changed-c5565bed-5098-4620-87d4-51776d233061. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 907.944844] env[61573]: DEBUG oslo_concurrency.lockutils [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] Acquiring lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.944983] env[61573]: DEBUG oslo_concurrency.lockutils [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] Acquired lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.945444] env[61573]: DEBUG nova.network.neutron [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Refreshing network info cache for port c5565bed-5098-4620-87d4-51776d233061 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 908.286805] env[61573]: DEBUG nova.network.neutron [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Updated VIF entry in instance network info cache for port c5565bed-5098-4620-87d4-51776d233061. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 908.287510] env[61573]: DEBUG nova.network.neutron [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Updating instance_info_cache with network_info: [{"id": "c5565bed-5098-4620-87d4-51776d233061", "address": "fa:16:3e:fb:05:9b", "network": {"id": "787f5ae1-5587-4201-b9b3-f8b4ecf31375", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b6b1daa7c889493c969d61d1a6ca8f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5565bed-50", "ovs_interfaceid": "c5565bed-5098-4620-87d4-51776d233061", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.302621] env[61573]: DEBUG oslo_concurrency.lockutils [req-8ac8e49f-873f-441d-b59c-5c03adc5cd2e req-0ea62895-bf16-405c-afbf-e0414324d5da service nova] Releasing lock "refresh_cache-36a5ff6d-5123-4323-8e86-3529828af0ab" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.369307] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "36a5ff6d-5123-4323-8e86-3529828af0ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.403646] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.404057] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 919.405984] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.399589] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.403301] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.403530] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 922.403687] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 922.424931] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425123] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425240] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425367] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425487] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425608] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425728] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425848] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.425966] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.426981] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 922.426981] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 922.426981] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.426981] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.427238] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.427238] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.439692] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.439827] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.440090] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.440180] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 922.441435] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d973e88-f8ee-40b2-b5c5-67fcb349d467 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.451685] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad2b352-79f5-422a-8300-4cec8d07555b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.467035] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18ffa11-80b4-4175-a767-003128f41b33 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.473950] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee171ae-2d57-4708-b5d8-b73558932962 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.506679] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 922.506853] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.507763] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.586453] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.586656] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.586818] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.586954] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587116] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587272] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587401] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587552] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587698] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.587824] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 922.601681] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 05122510-519e-43f3-96bf-51801559a5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.613542] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.624728] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79a02472-a364-42f6-9fe0-c030df8436b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.636285] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 80e12c49-e98a-4ece-a080-783f99fccabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.650119] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 57358b9b-2bf1-47d9-a28c-7d45770604d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.662311] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.674065] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5e637ba3-4faf-45f0-9454-dc38d14756c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.684928] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 18ee5e56-b3d5-4152-a825-d2f814589d43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.696278] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5be2b145-136b-4a1b-aafc-024180e9c398 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.711622] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 42a7b548-07fb-496d-b527-3a7528321a50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.724651] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.736485] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2eba140-db65-403e-9abf-58a6737bf853 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.747634] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2654514-d2bb-4c48-b351-b449e2044ddc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.760677] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 90be4957-04a8-40a1-a0ac-0cbaed2b1086 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.772489] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0cecb88d-2c13-4171-9291-204d26979697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.783976] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d5f7e49d-4444-4131-89c1-e4abdd9c1e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.796904] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f13f76f6-d764-4672-9a7d-37d687605348 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 922.797466] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 922.797710] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '26', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_308f1e086ba943b9b9cf27a1da4eda0a': '1', 'io_workload': '10', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_task_spawning': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_27a816079ac04dabaf85dec4005df607': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 923.154033] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2b21c0-0916-481e-8376-669cf0b83fa8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.162377] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f43a73-6502-4ba6-b3b0-5a94054260f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.193565] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-e95ef081-00f7-4195-8f41-a6f2ad6a356f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.201900] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07327f2d-fd48-4dfe-b432-099081cc14c8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.216061] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.224819] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.245564] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 923.245564] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.738s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.223146] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 952.472189] env[61573]: WARNING oslo_vmware.rw_handles [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
952.472189] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 952.472189] env[61573]: ERROR oslo_vmware.rw_handles [ 952.472992] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 952.477049] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 952.477049] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Copying Virtual Disk [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/41a250bb-c5d5-4812-9a7d-eb84b3cbbeed/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 952.477049] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9429478a-8880-45bd-bd7f-7a459a27793a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.483921] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 952.483921] env[61573]: value = "task-4836780" [ 952.483921] env[61573]: _type = "Task" [ 952.483921] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.492797] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836780, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.995368] env[61573]: DEBUG oslo_vmware.exceptions [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 952.995671] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.996239] env[61573]: ERROR nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 952.996239] env[61573]: Faults: ['InvalidArgument'] [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Traceback (most recent call last): [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] yield resources [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self.driver.spawn(context, instance, image_meta, [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self._vmops.spawn(context, instance, image_meta, injected_files, [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self._fetch_image_if_missing(context, vi) [ 952.996239] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] image_cache(vi, tmp_image_ds_loc) [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] vm_util.copy_virtual_disk( [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] session._wait_for_task(vmdk_copy_task) [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return self.wait_for_task(task_ref) [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return evt.wait() [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] result = hub.switch() [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 952.996715] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return self.greenlet.switch() [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self.f(*self.args, **self.kw) [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] raise exceptions.translate_fault(task_info.error) [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Faults: ['InvalidArgument'] [ 952.997078] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] [ 952.997078] env[61573]: INFO nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Terminating instance [ 952.998180] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.998363] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.998604] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8962fb-8a54-4e7d-946b-de8e9b31fcb7 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.001056] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 953.001247] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.002056] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee89d88-c0a4-4f5b-ace7-4d7970cffec5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.010388] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.011576] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d02dde2-2cbb-4345-942a-88467b9bef20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.013078] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.013249] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 953.013909] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e763f4f-4107-40b4-b89f-e1b8e3b8c441 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.019499] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for the task: (returnval){ [ 953.019499] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525ad37a-f3ac-e45a-be11-0af7d63a4100" [ 953.019499] env[61573]: _type = "Task" [ 953.019499] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.026677] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525ad37a-f3ac-e45a-be11-0af7d63a4100, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.086438] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.086665] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.086842] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleting the datastore file [datastore2] a9cca36b-ee0e-42b3-9c26-61c9b0715312 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.087150] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50056e5f-ecaa-47aa-8bf2-4583860eceaf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.094548] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for the task: (returnval){ [ 953.094548] env[61573]: value = "task-4836782" [ 953.094548] env[61573]: _type = "Task" [ 953.094548] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.103322] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836782, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.534841] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 953.535290] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Creating directory with path [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.535665] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdc53420-5475-459d-bf66-5b6fb70b1256 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.549113] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Created directory with path [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.549334] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Fetch image to [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 953.549509] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 953.550321] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a7aecd-3ceb-4ea8-9362-ca12c959aed2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.558955] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594ee160-6b19-45e7-9834-5d226766d530 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.568857] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d4b6b4-5c81-4e97-ac13-4b1d032f0920 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.602660] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b26e85ab-aeed-449f-99f6-fbb2ce9927a5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.611985] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1a7c063f-337a-43ed-9441-f5549e275486 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.613846] env[61573]: DEBUG oslo_vmware.api [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Task: {'id': task-4836782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083822} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.614123] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.614312] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 953.614482] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 953.614650] env[61573]: INFO nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Took 0.61 seconds to destroy the instance on the hypervisor. 
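The DeleteDatastoreFile_Task exchange above (invoke, repeated "progress is 0%." polls, then "completed successfully") is the standard oslo.vmware task-wait pattern that recurs throughout this log: a vSphere method returns a Task managed object and session.wait_for_task() polls it until it succeeds or raises a translated fault. The sketch below only illustrates that pattern; the helper name, datastore path and datacenter reference are placeholders, not Nova's actual ds_util.file_delete code.

    from oslo_vmware import exceptions

    def delete_datastore_file(session, ds_path, datacenter_ref):
        # Hypothetical helper illustrating the task-wait pattern in the log above.
        # `session` is an existing oslo_vmware.api.VMwareAPISession;
        # `ds_path` looks like '[datastore2] vmware_temp/.../file.vmdk'.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        try:
            # Polls the Task object until it finishes; the recurring
            # "Task: {'id': task-..., 'name': DeleteDatastoreFile_Task}
            # progress is 0%." DEBUG lines come from this polling loop.
            session.wait_for_task(task)
        except exceptions.VimFaultException as exc:
            # vCenter faults are translated and re-raised from the poll loop.
            print('Task failed: %s (faults: %s)' % (exc, exc.fault_list))
            raise

The same wait path is what surfaces the CopyVirtualDisk_Task failure earlier in this log: the InvalidArgument fault is raised from the poll loop and then propagates up through driver.spawn(), producing the traceback shown above.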
[ 953.618677] env[61573]: DEBUG nova.compute.claims [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 953.618854] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.619078] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.636171] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 953.695138] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 953.755160] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 953.755367] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 954.064640] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3aabf7-a5be-464b-a200-45e9d4eb4278 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.074124] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2248ab27-3d14-4f02-819f-00b39b6db69c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.106920] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fcbf81-661b-4343-b06e-c2bc0deaf762 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.115494] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da250174-4671-4cf5-b8af-2289c4413219 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.131696] env[61573]: DEBUG nova.compute.provider_tree [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.143869] env[61573]: DEBUG nova.scheduler.client.report [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.163686] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.544s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.164552] env[61573]: ERROR nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.164552] env[61573]: Faults: ['InvalidArgument'] [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Traceback (most recent call last): [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 954.164552] env[61573]: ERROR nova.compute.manager 
[instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self.driver.spawn(context, instance, image_meta, [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self._fetch_image_if_missing(context, vi) [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] image_cache(vi, tmp_image_ds_loc) [ 954.164552] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] vm_util.copy_virtual_disk( [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] session._wait_for_task(vmdk_copy_task) [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return self.wait_for_task(task_ref) [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return evt.wait() [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] result = hub.switch() [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] return self.greenlet.switch() [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 954.165088] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] self.f(*self.args, **self.kw) [ 954.165430] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 954.165430] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] raise exceptions.translate_fault(task_info.error) [ 954.165430] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.165430] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Faults: ['InvalidArgument'] [ 954.165430] env[61573]: ERROR nova.compute.manager [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] [ 954.165430] env[61573]: DEBUG nova.compute.utils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 954.166670] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Build of instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 was re-scheduled: A specified parameter was not correct: fileType [ 954.166670] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 954.167085] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 954.167264] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 954.167436] env[61573]: DEBUG nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 954.167602] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.629319] env[61573]: DEBUG nova.network.neutron [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.642324] env[61573]: INFO nova.compute.manager [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Took 0.47 seconds to deallocate network for instance. [ 954.785043] env[61573]: INFO nova.scheduler.client.report [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Deleted allocations for instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 [ 954.808862] env[61573]: DEBUG oslo_concurrency.lockutils [None req-20df0e45-0781-4847-85ca-350548a7a394 tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 388.339s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.809994] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 187.240s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.810220] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Acquiring lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.810430] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.810595] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.814672] env[61573]: INFO nova.compute.manager [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Terminating instance [ 954.816484] env[61573]: DEBUG nova.compute.manager [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 954.816643] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 954.816902] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e7774f8-0154-4947-8c71-81f496d4e4f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.822355] env[61573]: DEBUG nova.compute.manager [None req-96c9dd6a-c590-4347-a8ae-e1730a40a27b tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 46dd382d-f6d0-4bd5-b027-c07d01a9a595] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 954.829141] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5a1fff-1a38-4fd6-b888-22f62e1420ac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.863505] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9cca36b-ee0e-42b3-9c26-61c9b0715312 could not be found. [ 954.863698] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.863884] env[61573]: INFO nova.compute.manager [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 954.864147] env[61573]: DEBUG oslo.service.loopingcall [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.864565] env[61573]: DEBUG nova.compute.manager [None req-96c9dd6a-c590-4347-a8ae-e1730a40a27b tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 46dd382d-f6d0-4bd5-b027-c07d01a9a595] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 954.865474] env[61573]: DEBUG nova.compute.manager [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 954.865575] env[61573]: DEBUG nova.network.neutron [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.901296] env[61573]: DEBUG oslo_concurrency.lockutils [None req-96c9dd6a-c590-4347-a8ae-e1730a40a27b tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "46dd382d-f6d0-4bd5-b027-c07d01a9a595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.709s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.904139] env[61573]: DEBUG nova.network.neutron [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.910479] env[61573]: DEBUG nova.compute.manager [None req-950ce301-d7c2-4e38-8710-57b5fc928e59 tempest-TenantUsagesTestJSON-2084051121 tempest-TenantUsagesTestJSON-2084051121-project-member] [instance: 05122510-519e-43f3-96bf-51801559a5be] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 954.916442] env[61573]: INFO nova.compute.manager [-] [instance: a9cca36b-ee0e-42b3-9c26-61c9b0715312] Took 0.05 seconds to deallocate network for instance. [ 954.935137] env[61573]: DEBUG nova.compute.manager [None req-950ce301-d7c2-4e38-8710-57b5fc928e59 tempest-TenantUsagesTestJSON-2084051121 tempest-TenantUsagesTestJSON-2084051121-project-member] [instance: 05122510-519e-43f3-96bf-51801559a5be] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 954.960377] env[61573]: DEBUG oslo_concurrency.lockutils [None req-950ce301-d7c2-4e38-8710-57b5fc928e59 tempest-TenantUsagesTestJSON-2084051121 tempest-TenantUsagesTestJSON-2084051121-project-member] Lock "05122510-519e-43f3-96bf-51801559a5be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.684s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.974300] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 955.018166] env[61573]: DEBUG oslo_concurrency.lockutils [None req-00b693e6-5c94-40e8-8e78-9676732c6dba tempest-ServersAdminTestJSON-1166922298 tempest-ServersAdminTestJSON-1166922298-project-member] Lock "a9cca36b-ee0e-42b3-9c26-61c9b0715312" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.054046] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.054307] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.055795] env[61573]: INFO nova.compute.claims [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.452046] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdcdaf7-8b0f-4451-b731-583ab4869f60 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.460513] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f8ffca-42d1-4e66-83d6-1737857e8e6c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.496093] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ba4041-c8d2-4663-b0ca-1096b92cb1ac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.506349] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacbba9a-ed37-4f00-914c-bd570085a32d {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.524195] env[61573]: DEBUG nova.compute.provider_tree [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.536383] env[61573]: DEBUG nova.scheduler.client.report [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.554827] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.500s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.555534] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 955.618184] env[61573]: DEBUG nova.compute.utils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.618184] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Not allocating networking since 'none' was specified. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 955.645788] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 955.733786] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 955.769463] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 955.769679] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 955.769838] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.770042] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 955.770229] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.770325] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 955.770536] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 955.770712] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 955.770928] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 
tempest-ServerShowV247Test-355967977-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 955.771022] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 955.771224] env[61573]: DEBUG nova.virt.hardware [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 955.772661] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96c7308-1a88-4aaa-8958-dda1f5c18ac8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.783896] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bebfd85-e1c7-495c-8d33-5a4a35d5dbbe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.797766] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance VIF info [] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.803849] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Creating folder: Project (09c0dcd38fb64257ba2c08c59f75f097). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 955.804575] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c01b8ba-6f8c-406a-a350-fc1733bf51d3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.816345] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Created folder: Project (09c0dcd38fb64257ba2c08c59f75f097) in parent group-v942801. [ 955.816931] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Creating folder: Instances. Parent ref: group-v942851. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 955.817282] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92c88117-0f50-4003-bf22-ac78d60a5f2b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.828685] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Created folder: Instances in parent group-v942851. 
[ 955.829162] env[61573]: DEBUG oslo.service.loopingcall [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.829406] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 955.829637] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe19ef07-1664-42c5-bc83-7cff89ade4c0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.853327] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.853327] env[61573]: value = "task-4836785" [ 955.853327] env[61573]: _type = "Task" [ 955.853327] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.861259] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836785, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.363963] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836785, 'name': CreateVM_Task, 'duration_secs': 0.353066} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.364407] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 956.367027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.367027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.367027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.367027] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3a6b94d-098a-4173-bf00-d525a483486d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.373136] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for the task: (returnval){ [ 956.373136] 
env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524962aa-06c4-1eba-36e6-de614c4dcf84" [ 956.373136] env[61573]: _type = "Task" [ 956.373136] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.383463] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524962aa-06c4-1eba-36e6-de614c4dcf84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.887506] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.888034] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.888455] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.842024] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.842024] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.388179] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "8619fddd-ad89-42b3-95c5-55def25b6df2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.388179] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 
tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.836386] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "b1392e68-7dc9-4399-88a6-9463e06917b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.836735] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.334381] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.936463] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5400e020-3bbc-4ec9-b2df-b383570d678f tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "404b990e-a6c8-4166-be41-e49d44269fc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.936943] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5400e020-3bbc-4ec9-b2df-b383570d678f tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "404b990e-a6c8-4166-be41-e49d44269fc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.404662] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.405036] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.405145] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 980.400592] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.404081] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.420797] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.421052] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.421220] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.421370] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 982.422732] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794c8500-6b37-4392-9561-2ad1bc3a3271 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.432245] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0327c9e-a785-4da8-9048-a2f39c42c632 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.447231] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a6bcdf-c050-4200-9927-31b8f52cf93a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.455100] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d887efee-5147-4bbe-bf03-6fdde18c07d4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.487068] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 982.487230] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.487426] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.590880] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c445065e-68e5-4dda-ba5d-314dc2da12cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.590880] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.591031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.591061] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.592329] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.592329] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.594314] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.594314] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.594314] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.594314] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 982.611374] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 79a02472-a364-42f6-9fe0-c030df8436b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.627389] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 80e12c49-e98a-4ece-a080-783f99fccabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.641800] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 57358b9b-2bf1-47d9-a28c-7d45770604d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.657499] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.677663] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5e637ba3-4faf-45f0-9454-dc38d14756c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.695711] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 18ee5e56-b3d5-4152-a825-d2f814589d43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.713020] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 5be2b145-136b-4a1b-aafc-024180e9c398 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.726524] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 42a7b548-07fb-496d-b527-3a7528321a50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.744422] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.758221] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2eba140-db65-403e-9abf-58a6737bf853 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.773487] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b2654514-d2bb-4c48-b351-b449e2044ddc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.784948] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 90be4957-04a8-40a1-a0ac-0cbaed2b1086 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.799813] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0cecb88d-2c13-4171-9291-204d26979697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.819280] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d5f7e49d-4444-4131-89c1-e4abdd9c1e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.833980] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f13f76f6-d764-4672-9a7d-37d687605348 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.857806] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.873404] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.889287] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.901547] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 404b990e-a6c8-4166-be41-e49d44269fc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 982.901963] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 982.902144] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '29', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_a5f0b55c024f4143a4e2a141e10c1db3': '1', 'io_workload': '10', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'num_task_spawning': '1', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 983.421794] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e45b576-c02d-4a5f-9343-2384487fb384 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.431407] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0981a40c-8a2c-4355-8e21-d827dec7f362 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.468074] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a3be8e-85ed-4767-82b0-7abdd039c313 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.476632] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c0c0c0-5c9e-4045-bda7-af4e76081682 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.491108] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.502296] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.521951] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 983.522220] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.035s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.519418] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.519784] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.519999] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 984.520200] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 984.548336] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.548486] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.548639] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.548824] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.548954] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.549085] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.549203] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.551018] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.551018] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.551018] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 984.551018] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 984.551018] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.551436] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.551436] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.551436] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.764806] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5a5bf772-239b-447d-945c-76a526e54172 tempest-InstanceActionsTestJSON-975270587 tempest-InstanceActionsTestJSON-975270587-project-member] Acquiring lock "1eb74431-a48c-4427-97dc-a9ce4666605a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.765379] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5a5bf772-239b-447d-945c-76a526e54172 tempest-InstanceActionsTestJSON-975270587 tempest-InstanceActionsTestJSON-975270587-project-member] Lock "1eb74431-a48c-4427-97dc-a9ce4666605a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.268539] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a22f1102-0917-4d4b-8ee3-f8e4e4b9bb46 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "cd4fae54-ce8d-491c-9244-a32fd88e0183" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.268539] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a22f1102-0917-4d4b-8ee3-f8e4e4b9bb46 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "cd4fae54-ce8d-491c-9244-a32fd88e0183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.421224] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.315617] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6634c392-cb95-4884-a6a9-41d9d577904d tempest-ServersTestBootFromVolume-584192889 tempest-ServersTestBootFromVolume-584192889-project-member] Acquiring lock "4cd581ab-601a-4854-8b3a-5f368b40f2c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.316014] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6634c392-cb95-4884-a6a9-41d9d577904d tempest-ServersTestBootFromVolume-584192889 tempest-ServersTestBootFromVolume-584192889-project-member] Lock "4cd581ab-601a-4854-8b3a-5f368b40f2c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.780832] env[61573]: DEBUG oslo_concurrency.lockutils [None req-464dc338-f7e4-4e66-97df-c4c28427c64e tempest-ServerRescueTestJSON-231905722 tempest-ServerRescueTestJSON-231905722-project-member] Acquiring lock "c93aa98a-2c79-4ae3-ba66-7b2e4b67291b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.780832] env[61573]: DEBUG oslo_concurrency.lockutils [None req-464dc338-f7e4-4e66-97df-c4c28427c64e tempest-ServerRescueTestJSON-231905722 tempest-ServerRescueTestJSON-231905722-project-member] Lock "c93aa98a-2c79-4ae3-ba66-7b2e4b67291b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.148974] env[61573]: WARNING oslo_vmware.rw_handles [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1001.148974] env[61573]: ERROR oslo_vmware.rw_handles [ 1001.148974] env[61573]: DEBUG nova.virt.vmwareapi.images 
[None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1001.151112] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1001.152030] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Copying Virtual Disk [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/0d6711c0-0158-40e3-9329-5b59d29d90e6/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1001.152030] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-643720b4-d519-45c5-abaf-2db58aa96e95 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.160814] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for the task: (returnval){ [ 1001.160814] env[61573]: value = "task-4836796" [ 1001.160814] env[61573]: _type = "Task" [ 1001.160814] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.170902] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Task: {'id': task-4836796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.672790] env[61573]: DEBUG oslo_vmware.exceptions [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1001.673127] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.673789] env[61573]: ERROR nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1001.673789] env[61573]: Faults: ['InvalidArgument'] [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Traceback (most recent call last): [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] yield resources [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self.driver.spawn(context, instance, image_meta, [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self._fetch_image_if_missing(context, vi) [ 1001.673789] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] image_cache(vi, tmp_image_ds_loc) [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] vm_util.copy_virtual_disk( [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] session._wait_for_task(vmdk_copy_task) [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return self.wait_for_task(task_ref) [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return evt.wait() [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] result = hub.switch() [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1001.674376] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return self.greenlet.switch() [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self.f(*self.args, **self.kw) [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] raise exceptions.translate_fault(task_info.error) [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Faults: ['InvalidArgument'] [ 1001.674923] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] [ 1001.674923] env[61573]: INFO nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Terminating instance [ 1001.675814] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.676047] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.676331] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-66243241-4a56-4911-9d24-ec0d4fa61c7e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.678944] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1001.679163] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1001.679991] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b59291-2a25-44f5-aaec-c2ecd98280d3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.689842] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1001.690152] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-328b4af9-7819-43b5-875e-efdef11f66d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.693225] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.693412] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1001.694482] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-678efb12-9151-4dfa-a51a-0337cdc3e6c3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.700319] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for the task: (returnval){ [ 1001.700319] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b415c0-dfe5-8587-ba32-292bff31fabe" [ 1001.700319] env[61573]: _type = "Task" [ 1001.700319] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.711458] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b415c0-dfe5-8587-ba32-292bff31fabe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.768024] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1001.768024] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1001.768024] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Deleting the datastore file [datastore2] c445065e-68e5-4dda-ba5d-314dc2da12cf {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.768024] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-583e8527-a7da-4828-9beb-5cb6755518ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.773683] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for the task: (returnval){ [ 1001.773683] env[61573]: value = "task-4836798" [ 1001.773683] env[61573]: _type = "Task" [ 1001.773683] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.782787] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Task: {'id': task-4836798, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.211764] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1002.212070] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Creating directory with path [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.212318] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a9f3ff3-da91-4dd1-849f-535cdbf6dd04 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.235078] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Created directory with path [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.235296] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Fetch image to [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1002.235465] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1002.236297] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efa8af5-98b5-4f55-85f2-cd2190424b2d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.244890] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec61407b-77ed-4c85-b7e1-5e7758eb6606 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.255785] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51c9dee-9d50-40d7-aa61-08633d1685bb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.290966] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ae8cd1-b706-4f74-a4a5-690470878c0f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.301393] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-50cc7534-f1dd-474b-bfc0-0fdacc10a788 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.303285] env[61573]: DEBUG oslo_vmware.api [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Task: {'id': task-4836798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080177} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.303546] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.303728] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1002.303951] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1002.304093] env[61573]: INFO nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1002.306357] env[61573]: DEBUG nova.compute.claims [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1002.306529] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.306747] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.335447] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1002.403960] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1002.468406] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1002.468599] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1002.863104] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae07c88d-398e-4ebc-ab75-6843c01a3195 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.870510] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d2644c-824b-40ea-93b4-9a642b342e3b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.907008] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3e7cbc-6e38-4a1d-b337-a45bc9032f6b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.913797] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8753a2c9-d3a3-4648-ade4-c5d8d7097b39 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Acquiring lock "7b28cf4f-5ba2-4fde-8c53-7a403166ae2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.914126] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8753a2c9-d3a3-4648-ade4-c5d8d7097b39 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Lock "7b28cf4f-5ba2-4fde-8c53-7a403166ae2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.920146] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3317856c-18ae-48af-994f-2837b6644860 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.935950] env[61573]: DEBUG nova.compute.provider_tree [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.945615] env[61573]: DEBUG nova.scheduler.client.report [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.961083] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.654s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.961708] env[61573]: ERROR nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.961708] env[61573]: Faults: ['InvalidArgument'] [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Traceback (most recent call last): [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self.driver.spawn(context, instance, image_meta, [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self._fetch_image_if_missing(context, vi) [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] image_cache(vi, tmp_image_ds_loc) [ 1002.961708] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] vm_util.copy_virtual_disk( [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] session._wait_for_task(vmdk_copy_task) [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return self.wait_for_task(task_ref) [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return evt.wait() [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] result = hub.switch() [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] return self.greenlet.switch() [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1002.962114] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] self.f(*self.args, **self.kw) [ 1002.962462] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1002.962462] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] raise exceptions.translate_fault(task_info.error) [ 1002.962462] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.962462] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Faults: ['InvalidArgument'] [ 1002.962462] env[61573]: ERROR nova.compute.manager [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] [ 1002.962833] env[61573]: DEBUG nova.compute.utils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1002.969636] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Build of instance c445065e-68e5-4dda-ba5d-314dc2da12cf was re-scheduled: A specified parameter was not correct: fileType [ 1002.969636] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1002.970111] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1002.970291] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1002.970458] env[61573]: DEBUG nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1002.970618] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1003.777371] env[61573]: DEBUG nova.network.neutron [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.799826] env[61573]: INFO nova.compute.manager [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Took 0.83 seconds to deallocate network for instance. [ 1003.925855] env[61573]: INFO nova.scheduler.client.report [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Deleted allocations for instance c445065e-68e5-4dda-ba5d-314dc2da12cf [ 1003.950608] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1cd8aa7c-9bb0-4bf3-8bd0-3dd7074ce21f tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 434.818s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.951899] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 235.895s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.952137] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Acquiring lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.952371] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.952547] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.955472] env[61573]: INFO nova.compute.manager [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Terminating instance [ 1003.957374] env[61573]: DEBUG nova.compute.manager [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1003.957569] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.958206] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b81c60ef-a973-49a7-8ea0-66c5b1ced3d3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.968097] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d64be9-105d-4bfa-94ea-dacdfa79e0d4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.980360] env[61573]: DEBUG nova.compute.manager [None req-d0550374-d99f-4373-9a88-7d2fda0a3bcd tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: 79a02472-a364-42f6-9fe0-c030df8436b8] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.011756] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c445065e-68e5-4dda-ba5d-314dc2da12cf could not be found. 
[ 1004.012047] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1004.012245] env[61573]: INFO nova.compute.manager [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1004.012665] env[61573]: DEBUG oslo.service.loopingcall [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.012799] env[61573]: DEBUG nova.compute.manager [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1004.012892] env[61573]: DEBUG nova.network.neutron [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.016278] env[61573]: DEBUG nova.compute.manager [None req-d0550374-d99f-4373-9a88-7d2fda0a3bcd tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: 79a02472-a364-42f6-9fe0-c030df8436b8] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.047221] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d0550374-d99f-4373-9a88-7d2fda0a3bcd tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "79a02472-a364-42f6-9fe0-c030df8436b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.893s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.053757] env[61573]: DEBUG nova.network.neutron [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.058088] env[61573]: DEBUG nova.compute.manager [None req-5d49b678-8da3-42ff-b97a-20a86535fb5e tempest-ServersAaction247Test-1702220296 tempest-ServersAaction247Test-1702220296-project-member] [instance: 80e12c49-e98a-4ece-a080-783f99fccabc] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.066175] env[61573]: INFO nova.compute.manager [-] [instance: c445065e-68e5-4dda-ba5d-314dc2da12cf] Took 0.05 seconds to deallocate network for instance. [ 1004.088758] env[61573]: DEBUG nova.compute.manager [None req-5d49b678-8da3-42ff-b97a-20a86535fb5e tempest-ServersAaction247Test-1702220296 tempest-ServersAaction247Test-1702220296-project-member] [instance: 80e12c49-e98a-4ece-a080-783f99fccabc] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.135280] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5d49b678-8da3-42ff-b97a-20a86535fb5e tempest-ServersAaction247Test-1702220296 tempest-ServersAaction247Test-1702220296-project-member] Lock "80e12c49-e98a-4ece-a080-783f99fccabc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.654s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.151713] env[61573]: DEBUG nova.compute.manager [None req-f40cc3fd-3490-48ab-8e08-202fc3cc4f27 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: 57358b9b-2bf1-47d9-a28c-7d45770604d9] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.187033] env[61573]: DEBUG nova.compute.manager [None req-f40cc3fd-3490-48ab-8e08-202fc3cc4f27 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: 57358b9b-2bf1-47d9-a28c-7d45770604d9] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.213053] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9be474b0-0c5f-41cc-b12a-3b88e551fb76 tempest-ImagesOneServerTestJSON-2031776308 tempest-ImagesOneServerTestJSON-2031776308-project-member] Lock "c445065e-68e5-4dda-ba5d-314dc2da12cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.260s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.220823] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f40cc3fd-3490-48ab-8e08-202fc3cc4f27 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "57358b9b-2bf1-47d9-a28c-7d45770604d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.564s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.235359] env[61573]: DEBUG nova.compute.manager [None req-da3bd14a-c736-477d-86bd-ea11a1c316d4 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] [instance: 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.264121] env[61573]: DEBUG nova.compute.manager [None req-da3bd14a-c736-477d-86bd-ea11a1c316d4 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] [instance: 5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.293787] env[61573]: DEBUG oslo_concurrency.lockutils [None req-da3bd14a-c736-477d-86bd-ea11a1c316d4 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Lock "5c09f011-1f2b-41f9-b1e3-1dee4e5b85d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.033s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.305648] env[61573]: DEBUG nova.compute.manager [None req-05f06731-ecad-4b60-8411-5a9f29749070 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] [instance: 5e637ba3-4faf-45f0-9454-dc38d14756c5] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.336300] env[61573]: DEBUG nova.compute.manager [None req-05f06731-ecad-4b60-8411-5a9f29749070 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] [instance: 5e637ba3-4faf-45f0-9454-dc38d14756c5] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.374013] env[61573]: DEBUG oslo_concurrency.lockutils [None req-05f06731-ecad-4b60-8411-5a9f29749070 tempest-ServerRescueNegativeTestJSON-1687451560 tempest-ServerRescueNegativeTestJSON-1687451560-project-member] Lock "5e637ba3-4faf-45f0-9454-dc38d14756c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.872s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.388157] env[61573]: DEBUG nova.compute.manager [None req-d6bed70b-24d5-4654-a6d2-9c055867ccb6 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] [instance: 18ee5e56-b3d5-4152-a825-d2f814589d43] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.418825] env[61573]: DEBUG nova.compute.manager [None req-d6bed70b-24d5-4654-a6d2-9c055867ccb6 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] [instance: 18ee5e56-b3d5-4152-a825-d2f814589d43] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.446180] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6bed70b-24d5-4654-a6d2-9c055867ccb6 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Lock "18ee5e56-b3d5-4152-a825-d2f814589d43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.021s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.459379] env[61573]: DEBUG nova.compute.manager [None req-c3b1203b-9ac7-4cca-9b16-828c00c8a855 tempest-ServersTestManualDisk-2129984535 tempest-ServersTestManualDisk-2129984535-project-member] [instance: 5be2b145-136b-4a1b-aafc-024180e9c398] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.488190] env[61573]: DEBUG nova.compute.manager [None req-c3b1203b-9ac7-4cca-9b16-828c00c8a855 tempest-ServersTestManualDisk-2129984535 tempest-ServersTestManualDisk-2129984535-project-member] [instance: 5be2b145-136b-4a1b-aafc-024180e9c398] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.512499] env[61573]: DEBUG oslo_concurrency.lockutils [None req-c3b1203b-9ac7-4cca-9b16-828c00c8a855 tempest-ServersTestManualDisk-2129984535 tempest-ServersTestManualDisk-2129984535-project-member] Lock "5be2b145-136b-4a1b-aafc-024180e9c398" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.288s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.527951] env[61573]: DEBUG nova.compute.manager [None req-d4a1a92f-92d6-4558-a68f-0cef537686e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: 42a7b548-07fb-496d-b527-3a7528321a50] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.561801] env[61573]: DEBUG nova.compute.manager [None req-d4a1a92f-92d6-4558-a68f-0cef537686e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: 42a7b548-07fb-496d-b527-3a7528321a50] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1004.588627] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d4a1a92f-92d6-4558-a68f-0cef537686e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "42a7b548-07fb-496d-b527-3a7528321a50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.924s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.603268] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1004.682568] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.683313] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.684729] env[61573]: INFO nova.compute.claims [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.134246] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d691b1-b6c5-4b31-99ef-7cd07b16235c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.146968] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738e6562-00d7-4861-a14b-dd485537f4f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.182200] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1776ec-473e-4302-8163-34a6d7f300e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.193021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58b0298-4ce1-4023-bdcf-cd4eb93eeb96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.209546] env[61573]: DEBUG nova.compute.provider_tree [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.223397] env[61573]: DEBUG nova.scheduler.client.report [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.250302] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.565s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.250302] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1005.313262] env[61573]: DEBUG nova.compute.utils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1005.314744] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1005.314744] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1005.333118] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1005.442334] env[61573]: DEBUG nova.policy [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c118500563d4244b7a72deaba0478f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c510d515c28b4abb9f0dcc76c0032b88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1005.446193] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1005.477066] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1005.477500] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1005.477500] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.477680] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1005.478537] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.478537] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1005.478537] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1005.478537] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1005.478537] 
env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1005.478748] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1005.478886] env[61573]: DEBUG nova.virt.hardware [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1005.479804] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ac94bb-6a40-4953-8783-382286137ddb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.495154] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0f733e81-23b8-4102-b11a-a41f841bc8e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "546a2a2f-fca3-410f-88c0-f71a820fd2bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.495723] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0f733e81-23b8-4102-b11a-a41f841bc8e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "546a2a2f-fca3-410f-88c0-f71a820fd2bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.496891] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dba25d-fa73-494a-a6dd-73ca696a575b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.266766] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Successfully created port: b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.432234] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Successfully updated port: b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1007.443357] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" 
{{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.443475] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquired lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.443656] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.534757] env[61573]: DEBUG nova.compute.manager [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Received event network-vif-plugged-b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1007.534987] env[61573]: DEBUG oslo_concurrency.lockutils [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] Acquiring lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.535248] env[61573]: DEBUG oslo_concurrency.lockutils [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.535428] env[61573]: DEBUG oslo_concurrency.lockutils [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.535599] env[61573]: DEBUG nova.compute.manager [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] No waiting events found dispatching network-vif-plugged-b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.535763] env[61573]: WARNING nova.compute.manager [req-5843d1ca-eac3-4b01-b733-2aa4b779cc55 req-859e4ae2-da49-43ff-a210-9ffa9dc2d2c4 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Received unexpected event network-vif-plugged-b8374538-f48f-4133-a2bf-9b6d233c7f24 for instance with vm_state building and task_state spawning. [ 1007.546394] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1007.927601] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Updating instance_info_cache with network_info: [{"id": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "address": "fa:16:3e:a5:2f:63", "network": {"id": "9d2b4743-4d63-4207-a1a9-7ef492323eb4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-231571745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c510d515c28b4abb9f0dcc76c0032b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8374538-f4", "ovs_interfaceid": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.943795] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Releasing lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.944234] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance network_info: |[{"id": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "address": "fa:16:3e:a5:2f:63", "network": {"id": "9d2b4743-4d63-4207-a1a9-7ef492323eb4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-231571745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c510d515c28b4abb9f0dcc76c0032b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8374538-f4", "ovs_interfaceid": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.944934] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:2f:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8374538-f48f-4133-a2bf-9b6d233c7f24', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.956200] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Creating folder: Project (c510d515c28b4abb9f0dcc76c0032b88). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.957031] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60f411c9-815e-415a-a019-f92906226388 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.971369] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Created folder: Project (c510d515c28b4abb9f0dcc76c0032b88) in parent group-v942801. [ 1007.971720] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Creating folder: Instances. Parent ref: group-v942858. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.972049] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6f43dc2-7459-4e2b-b92a-cc1ecb0e1670 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.983334] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Created folder: Instances in parent group-v942858. [ 1007.983576] env[61573]: DEBUG oslo.service.loopingcall [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.983768] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1007.983993] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-847b44f6-c332-4409-95ab-2abe6be02ced {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.005727] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.005727] env[61573]: value = "task-4836801" [ 1008.005727] env[61573]: _type = "Task" [ 1008.005727] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.014334] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836801, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.516689] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836801, 'name': CreateVM_Task, 'duration_secs': 0.393845} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.516964] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1008.517559] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.517725] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.518052] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.518308] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e21671d-7a1a-4779-b0f0-58bcc824f044 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.526172] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for the task: (returnval){ [ 1008.526172] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b7168e-aea5-085f-eeee-7167250b14bf" [ 1008.526172] env[61573]: _type = "Task" [ 1008.526172] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.535263] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b7168e-aea5-085f-eeee-7167250b14bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.037820] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.038321] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.038321] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.607642] env[61573]: DEBUG nova.compute.manager [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Received event network-changed-b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1009.608218] env[61573]: DEBUG nova.compute.manager [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Refreshing instance network info cache due to event network-changed-b8374538-f48f-4133-a2bf-9b6d233c7f24. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1009.608610] env[61573]: DEBUG oslo_concurrency.lockutils [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] Acquiring lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.608806] env[61573]: DEBUG oslo_concurrency.lockutils [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] Acquired lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.609084] env[61573]: DEBUG nova.network.neutron [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Refreshing network info cache for port b8374538-f48f-4133-a2bf-9b6d233c7f24 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.059613] env[61573]: DEBUG nova.network.neutron [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Updated VIF entry in instance network info cache for port b8374538-f48f-4133-a2bf-9b6d233c7f24. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.060105] env[61573]: DEBUG nova.network.neutron [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Updating instance_info_cache with network_info: [{"id": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "address": "fa:16:3e:a5:2f:63", "network": {"id": "9d2b4743-4d63-4207-a1a9-7ef492323eb4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-231571745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c510d515c28b4abb9f0dcc76c0032b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8374538-f4", "ovs_interfaceid": "b8374538-f48f-4133-a2bf-9b6d233c7f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.070123] env[61573]: DEBUG oslo_concurrency.lockutils [req-78f08744-f767-4926-8a1b-7de605f8e291 req-42daed21-eab4-4f55-8bcb-6d1554e4df66 service nova] Releasing lock "refresh_cache-54860ec5-a1ff-4d7d-ae70-769f8fad731b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.684410] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] 
Acquiring lock "59913660-3644-41f2-a422-f814bd69b4a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.684741] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.457614] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.786458] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "2d5777f8-a431-43bd-8934-7cc33fd14718" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.786704] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.942159] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "1605911c-cf22-4206-b911-92b2a137dc84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.942395] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.404652] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.403605] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.403866] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1042.404545] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.415943] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.416179] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.416350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.416500] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1042.417591] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d7654a-85c7-4ff6-a0cc-bef2946df9d8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.427041] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6807f8-48d1-46a4-a94e-518f1191eb45 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.441185] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93279cbc-9638-447e-85d7-45cda5365569 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.447907] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da50580-480e-4d78-b0f0-514c57c68f3b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.479662] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180500MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1042.479851] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.480027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.555886] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556111] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556284] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556440] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556591] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556744] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.556972] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.557039] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.557112] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.557224] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1042.568738] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.579529] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.590844] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.602916] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 404b990e-a6c8-4166-be41-e49d44269fc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.642614] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1eb74431-a48c-4427-97dc-a9ce4666605a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.653988] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cd4fae54-ce8d-491c-9244-a32fd88e0183 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.664609] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4cd581ab-601a-4854-8b3a-5f368b40f2c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.674570] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c93aa98a-2c79-4ae3-ba66-7b2e4b67291b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.684107] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 7b28cf4f-5ba2-4fde-8c53-7a403166ae2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.693242] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 546a2a2f-fca3-410f-88c0-f71a820fd2bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.702726] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.713675] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.726532] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1042.726876] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1042.727110] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '38', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_19a0cd3d99c04e37aba52b7de50f98c9': '1', 'io_workload': '10', 'num_proj_2bb57dd7a5d14a62bc23a9747d5d4ffe': '1', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1043.013974] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1471cef4-1e41-45cf-9bf2-c83bcce8b69d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.021656] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9fd99a-fbb9-47c0-b226-dbcdbe169ef8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.051929] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a73efff-f388-4df0-a76d-996ae232173f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.059900] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b888fa7-3452-4992-8dd4-fe9ea4ef5488 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.073383] env[61573]: DEBUG nova.compute.provider_tree [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.082690] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1043.097810] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1043.097958] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.618s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.097893] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.398903] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.403616] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.403815] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1045.404012] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1045.425771] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.425877] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426075] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426201] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426367] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426500] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426591] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426699] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426815] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.426928] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1045.427054] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1045.427533] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.427763] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.404237] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.590334] env[61573]: WARNING oslo_vmware.rw_handles [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1051.590334] env[61573]: ERROR oslo_vmware.rw_handles [ 1051.590967] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1051.593237] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1051.593510] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 
tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Copying Virtual Disk [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/eceafdbf-c64d-41cb-ad3d-a8b2af6ffd7e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1051.594195] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23a24dc5-64c9-4e05-8280-1fae961c119a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.602978] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for the task: (returnval){ [ 1051.602978] env[61573]: value = "task-4836802" [ 1051.602978] env[61573]: _type = "Task" [ 1051.602978] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.611807] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Task: {'id': task-4836802, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.113692] env[61573]: DEBUG oslo_vmware.exceptions [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1052.114052] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.114630] env[61573]: ERROR nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1052.114630] env[61573]: Faults: ['InvalidArgument'] [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Traceback (most recent call last): [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] yield resources [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self.driver.spawn(context, instance, image_meta, [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self._fetch_image_if_missing(context, vi) [ 1052.114630] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] image_cache(vi, tmp_image_ds_loc) [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] vm_util.copy_virtual_disk( [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] session._wait_for_task(vmdk_copy_task) [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return self.wait_for_task(task_ref) [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return evt.wait() [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] result = hub.switch() [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1052.114982] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return self.greenlet.switch() [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self.f(*self.args, **self.kw) [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] raise exceptions.translate_fault(task_info.error) [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Faults: ['InvalidArgument'] [ 1052.115345] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] [ 1052.115345] env[61573]: INFO nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Terminating instance [ 1052.116543] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.116752] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.117010] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0b066c78-2ff6-4901-8103-1a4c447249c8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.119579] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1052.119774] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1052.120629] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252335cb-0d07-4acc-aa19-0427c9daba42 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.128439] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1052.128738] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76262296-0719-420d-866e-5a1399e4af7e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.131121] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.131302] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1052.132304] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a2355d8-6f8e-4585-bb73-30196282fc53 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.138226] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for the task: (returnval){ [ 1052.138226] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b4fbff-86e0-c2f2-0614-1d429e1fabcc" [ 1052.138226] env[61573]: _type = "Task" [ 1052.138226] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.150581] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b4fbff-86e0-c2f2-0614-1d429e1fabcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.206884] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1052.207135] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1052.207300] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Deleting the datastore file [datastore2] 4c70f154-7d65-4cea-ad90-8626f58b70f5 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.207613] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-697e50e0-04d7-4011-ab7c-fc1b2c649068 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.215145] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for the task: (returnval){ [ 1052.215145] env[61573]: value = "task-4836804" [ 1052.215145] env[61573]: _type = "Task" [ 1052.215145] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.224902] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Task: {'id': task-4836804, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.649545] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1052.649830] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Creating directory with path [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.650051] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63212c3f-3f6c-4460-8b6b-2d19b02e9c5a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.661766] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Created directory with path [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.661974] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Fetch image to [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1052.662217] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1052.663088] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720eff59-84c7-4017-af78-c85f6b14557f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.670564] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5855cd1-e262-479e-8990-47c9815e7c5c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.680497] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e2cb55-b2f5-4565-bbc3-590782561ed0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.712575] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c636fc10-34b8-489c-a2c1-9f71a20d3b81 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.720449] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eb8ea3a7-debe-4f8e-b50a-cbd9a9decc75 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.725040] env[61573]: DEBUG oslo_vmware.api [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Task: {'id': task-4836804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067184} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.725647] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.725870] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1052.726095] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1052.726282] env[61573]: INFO nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Took 0.61 seconds to destroy the instance on the hypervisor. 
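[editor's note] The records above show the CopyVirtualDisk_Task being created, polled through wait_for_task at 0% progress, and finally failing with the vCenter fault "A specified parameter was not correct: fileType", which the spawn path surfaces as a VimFaultException before tearing the instance down. The sketch below is a minimal, hypothetical illustration of that poll-until-terminal pattern; it is not the oslo_vmware implementation, and the names (get_task_info, info.state, info.error) are assumptions made for the example.

import time

class VimFaultException(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is a callable returning an object with .state
    ('queued', 'running', 'success', 'error'), .progress, .result and
    .error; this mirrors the CopyVirtualDisk_Task polling visible in the
    log, where progress is reported at 0% before the task errors out.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            # Translate the vCenter fault into an exception, as the log's
            # _poll_task frame does before it propagates up through spawn().
            raise VimFaultException(info.error.message, info.error.faults)
        time.sleep(poll_interval)  # task still queued/running; keep polling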
[ 1052.728411] env[61573]: DEBUG nova.compute.claims [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1052.728596] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.728812] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.749370] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1052.804371] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1052.863751] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1052.863950] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1053.116208] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac1a300-69f3-48ae-a3cc-29563cbe12fd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.124269] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed1cd22-3ab0-4c9c-945b-8f79f419f19b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.154809] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ffe86c-61ad-4d47-9dbe-3f0d2ffb0ac7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.162841] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ada5b8-1009-486a-ab8c-8d0f72befab7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.176720] env[61573]: DEBUG nova.compute.provider_tree [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.206930] env[61573]: DEBUG nova.scheduler.client.report [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1053.222559] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.494s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.223172] env[61573]: ERROR nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1053.223172] env[61573]: Faults: ['InvalidArgument'] [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Traceback (most recent call last): [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self.driver.spawn(context, instance, image_meta, [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self._fetch_image_if_missing(context, vi) [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] image_cache(vi, tmp_image_ds_loc) [ 1053.223172] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] vm_util.copy_virtual_disk( [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] session._wait_for_task(vmdk_copy_task) [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return self.wait_for_task(task_ref) [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return evt.wait() [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] result = hub.switch() [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] return self.greenlet.switch() [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1053.223527] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] self.f(*self.args, **self.kw) [ 1053.223928] env[61573]: ERROR nova.compute.manager [instance: 
4c70f154-7d65-4cea-ad90-8626f58b70f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1053.223928] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] raise exceptions.translate_fault(task_info.error) [ 1053.223928] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1053.223928] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Faults: ['InvalidArgument'] [ 1053.223928] env[61573]: ERROR nova.compute.manager [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] [ 1053.223928] env[61573]: DEBUG nova.compute.utils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1053.225953] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Build of instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 was re-scheduled: A specified parameter was not correct: fileType [ 1053.225953] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1053.226572] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1053.226833] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1053.227105] env[61573]: DEBUG nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1053.227249] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1053.678428] env[61573]: DEBUG nova.network.neutron [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.695637] env[61573]: INFO nova.compute.manager [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Took 0.47 seconds to deallocate network for instance. [ 1053.797560] env[61573]: INFO nova.scheduler.client.report [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Deleted allocations for instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 [ 1053.818088] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5592dc05-dc0c-4d91-8fc5-ee30f9616d18 tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 482.608s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.820643] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 284.329s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.820643] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Acquiring lock "4c70f154-7d65-4cea-ad90-8626f58b70f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.820643] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock 
"4c70f154-7d65-4cea-ad90-8626f58b70f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.820937] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.825874] env[61573]: INFO nova.compute.manager [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Terminating instance [ 1053.827921] env[61573]: DEBUG nova.compute.manager [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1053.828094] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1053.828366] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5362326-0286-4b75-aeaa-e3cf552bfd92 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.838830] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9a42d3-11d9-4b38-900e-72a62c0ab033 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.849965] env[61573]: DEBUG nova.compute.manager [None req-ccf7beb8-926d-4fbf-b897-eb35f310c7ab tempest-ServersTestJSON-566902269 tempest-ServersTestJSON-566902269-project-member] [instance: b2eba140-db65-403e-9abf-58a6737bf853] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1053.875452] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c70f154-7d65-4cea-ad90-8626f58b70f5 could not be found. 
[ 1053.875683] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1053.875872] env[61573]: INFO nova.compute.manager [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1053.876453] env[61573]: DEBUG oslo.service.loopingcall [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.877264] env[61573]: DEBUG nova.compute.manager [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1053.877366] env[61573]: DEBUG nova.network.neutron [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1053.879156] env[61573]: DEBUG nova.compute.manager [None req-ccf7beb8-926d-4fbf-b897-eb35f310c7ab tempest-ServersTestJSON-566902269 tempest-ServersTestJSON-566902269-project-member] [instance: b2eba140-db65-403e-9abf-58a6737bf853] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1053.900149] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ccf7beb8-926d-4fbf-b897-eb35f310c7ab tempest-ServersTestJSON-566902269 tempest-ServersTestJSON-566902269-project-member] Lock "b2eba140-db65-403e-9abf-58a6737bf853" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.850s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.904135] env[61573]: DEBUG nova.network.neutron [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.908222] env[61573]: DEBUG nova.compute.manager [None req-564e56ff-7943-4e63-97d6-9518c35b681b tempest-ServerAddressesTestJSON-1771230949 tempest-ServerAddressesTestJSON-1771230949-project-member] [instance: b2654514-d2bb-4c48-b351-b449e2044ddc] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1053.911846] env[61573]: INFO nova.compute.manager [-] [instance: 4c70f154-7d65-4cea-ad90-8626f58b70f5] Took 0.03 seconds to deallocate network for instance. [ 1053.933370] env[61573]: DEBUG nova.compute.manager [None req-564e56ff-7943-4e63-97d6-9518c35b681b tempest-ServerAddressesTestJSON-1771230949 tempest-ServerAddressesTestJSON-1771230949-project-member] [instance: b2654514-d2bb-4c48-b351-b449e2044ddc] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1053.956604] env[61573]: DEBUG oslo_concurrency.lockutils [None req-564e56ff-7943-4e63-97d6-9518c35b681b tempest-ServerAddressesTestJSON-1771230949 tempest-ServerAddressesTestJSON-1771230949-project-member] Lock "b2654514-d2bb-4c48-b351-b449e2044ddc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.948s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.966388] env[61573]: DEBUG nova.compute.manager [None req-13400ed2-7993-42c7-b817-76e7cd3c2d77 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 90be4957-04a8-40a1-a0ac-0cbaed2b1086] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1053.994393] env[61573]: DEBUG nova.compute.manager [None req-13400ed2-7993-42c7-b817-76e7cd3c2d77 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 90be4957-04a8-40a1-a0ac-0cbaed2b1086] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1054.013355] env[61573]: DEBUG oslo_concurrency.lockutils [None req-288ca295-844d-4e7a-8e91-4abba47126ec tempest-ServerMetadataNegativeTestJSON-587236571 tempest-ServerMetadataNegativeTestJSON-587236571-project-member] Lock "4c70f154-7d65-4cea-ad90-8626f58b70f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.019979] env[61573]: DEBUG oslo_concurrency.lockutils [None req-13400ed2-7993-42c7-b817-76e7cd3c2d77 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "90be4957-04a8-40a1-a0ac-0cbaed2b1086" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.122s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.029185] env[61573]: DEBUG nova.compute.manager [None req-182c5fc1-08f4-483a-b5e2-462c6b5a9987 tempest-ServerGroupTestJSON-1991982297 tempest-ServerGroupTestJSON-1991982297-project-member] [instance: 0cecb88d-2c13-4171-9291-204d26979697] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1054.051558] env[61573]: DEBUG nova.compute.manager [None req-182c5fc1-08f4-483a-b5e2-462c6b5a9987 tempest-ServerGroupTestJSON-1991982297 tempest-ServerGroupTestJSON-1991982297-project-member] [instance: 0cecb88d-2c13-4171-9291-204d26979697] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1054.072820] env[61573]: DEBUG oslo_concurrency.lockutils [None req-182c5fc1-08f4-483a-b5e2-462c6b5a9987 tempest-ServerGroupTestJSON-1991982297 tempest-ServerGroupTestJSON-1991982297-project-member] Lock "0cecb88d-2c13-4171-9291-204d26979697" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.053s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.082346] env[61573]: DEBUG nova.compute.manager [None req-61bb472e-865b-4052-a90c-98fab4030e91 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] [instance: d5f7e49d-4444-4131-89c1-e4abdd9c1e49] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1054.110460] env[61573]: DEBUG nova.compute.manager [None req-61bb472e-865b-4052-a90c-98fab4030e91 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] [instance: d5f7e49d-4444-4131-89c1-e4abdd9c1e49] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1054.133460] env[61573]: DEBUG oslo_concurrency.lockutils [None req-61bb472e-865b-4052-a90c-98fab4030e91 tempest-SecurityGroupsTestJSON-510587562 tempest-SecurityGroupsTestJSON-510587562-project-member] Lock "d5f7e49d-4444-4131-89c1-e4abdd9c1e49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.632s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.142714] env[61573]: DEBUG nova.compute.manager [None req-ca4d8de4-5e94-4b8d-92b8-d3b5158b3701 tempest-ServerActionsTestJSON-2137638428 tempest-ServerActionsTestJSON-2137638428-project-member] [instance: f13f76f6-d764-4672-9a7d-37d687605348] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1054.166783] env[61573]: DEBUG nova.compute.manager [None req-ca4d8de4-5e94-4b8d-92b8-d3b5158b3701 tempest-ServerActionsTestJSON-2137638428 tempest-ServerActionsTestJSON-2137638428-project-member] [instance: f13f76f6-d764-4672-9a7d-37d687605348] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1054.190114] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca4d8de4-5e94-4b8d-92b8-d3b5158b3701 tempest-ServerActionsTestJSON-2137638428 tempest-ServerActionsTestJSON-2137638428-project-member] Lock "f13f76f6-d764-4672-9a7d-37d687605348" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.597s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.202967] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1054.256299] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.256554] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.258060] env[61573]: INFO nova.compute.claims [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.581033] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc75d82-9ed6-4662-81aa-d678750ebde7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.588658] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02a723a-ff8b-4dba-a59b-cd22d4af1091 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.625557] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f647feba-a624-411e-a266-d997f45f9899 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.635202] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d712a4-a9f4-45d1-acff-c6cf1c1eeb88 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.650565] env[61573]: DEBUG nova.compute.provider_tree [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.662889] env[61573]: DEBUG nova.scheduler.client.report [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
1054.681787] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.425s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.682497] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1054.725756] env[61573]: DEBUG nova.compute.utils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1054.727222] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1054.727428] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1054.739246] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1054.809357] env[61573]: DEBUG nova.policy [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15e5b48486b54f11a518aae9d8de198b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ebf7b3bebe84e8f8a46532726f8935b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1054.813276] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1054.843339] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1054.843596] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1054.843776] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.844075] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1054.844199] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.844530] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1054.844530] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1054.844700] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1054.844883] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1054.845080] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1054.845262] env[61573]: DEBUG nova.virt.hardware [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1054.846358] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1449ac6b-0808-4773-8c5b-63c927be92dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.855355] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ae8eb7-395c-4ca2-aaa6-f1f57fde4a73 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.184577] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Successfully created port: a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.047099] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Successfully updated port: a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.057780] env[61573]: DEBUG nova.compute.manager [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Received event network-vif-plugged-a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1056.057928] env[61573]: DEBUG oslo_concurrency.lockutils [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] Acquiring lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.058151] env[61573]: DEBUG oslo_concurrency.lockutils [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.058317] env[61573]: DEBUG oslo_concurrency.lockutils [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.058477] env[61573]: DEBUG nova.compute.manager [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] No waiting events found dispatching network-vif-plugged-a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1056.058638] env[61573]: WARNING nova.compute.manager [req-94fbf696-3b02-453a-acd7-c419b110edbd req-580c96e4-cc8d-4fd2-a90e-fdba58120571 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Received unexpected event network-vif-plugged-a3c43958-e0e7-442f-a5e9-48cdb50dcdcd for instance with vm_state building and task_state spawning. [ 1056.064207] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.064347] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquired lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.064491] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1056.126027] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1056.323038] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Updating instance_info_cache with network_info: [{"id": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "address": "fa:16:3e:9d:ea:0d", "network": {"id": "aa6b246f-71e7-4ef6-ac22-09a36b5c7420", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1103078229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ebf7b3bebe84e8f8a46532726f8935b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3c43958-e0", "ovs_interfaceid": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.339619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Releasing lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.340165] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance network_info: |[{"id": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "address": "fa:16:3e:9d:ea:0d", "network": {"id": "aa6b246f-71e7-4ef6-ac22-09a36b5c7420", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1103078229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ebf7b3bebe84e8f8a46532726f8935b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3c43958-e0", "ovs_interfaceid": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1056.341102] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:ea:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3c43958-e0e7-442f-a5e9-48cdb50dcdcd', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.348582] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Creating folder: Project (4ebf7b3bebe84e8f8a46532726f8935b). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1056.349523] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ac3a859-7d03-42b5-9772-7026194df097 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.365122] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Created folder: Project (4ebf7b3bebe84e8f8a46532726f8935b) in parent group-v942801. [ 1056.365375] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Creating folder: Instances. Parent ref: group-v942861. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1056.365593] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42c2f46e-72d7-46a6-9192-6f2b0154febd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.375573] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Created folder: Instances in parent group-v942861. [ 1056.375832] env[61573]: DEBUG oslo.service.loopingcall [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.376043] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1056.376264] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b42a1099-89c9-40b2-9487-32c4f00f1b47 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.398567] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.398567] env[61573]: value = "task-4836807" [ 1056.398567] env[61573]: _type = "Task" [ 1056.398567] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.407932] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836807, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.910672] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836807, 'name': CreateVM_Task, 'duration_secs': 0.308409} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.910845] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1056.911544] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.911713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.912060] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1056.912315] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d86edb51-98a5-4c0b-a9f6-6fb55f2363a1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.917373] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for the task: (returnval){ [ 1056.917373] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52e82930-c830-c386-bc8f-3022d582eeee" [ 1056.917373] env[61573]: _type = "Task" [ 
1056.917373] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.925390] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52e82930-c830-c386-bc8f-3022d582eeee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.428389] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.428696] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.428918] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.181476] env[61573]: DEBUG nova.compute.manager [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Received event network-changed-a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1058.181476] env[61573]: DEBUG nova.compute.manager [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Refreshing instance network info cache due to event network-changed-a3c43958-e0e7-442f-a5e9-48cdb50dcdcd. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1058.181553] env[61573]: DEBUG oslo_concurrency.lockutils [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] Acquiring lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.181732] env[61573]: DEBUG oslo_concurrency.lockutils [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] Acquired lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.181936] env[61573]: DEBUG nova.network.neutron [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Refreshing network info cache for port a3c43958-e0e7-442f-a5e9-48cdb50dcdcd {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1058.724290] env[61573]: DEBUG nova.network.neutron [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Updated VIF entry in instance network info cache for port a3c43958-e0e7-442f-a5e9-48cdb50dcdcd. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1058.724726] env[61573]: DEBUG nova.network.neutron [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Updating instance_info_cache with network_info: [{"id": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "address": "fa:16:3e:9d:ea:0d", "network": {"id": "aa6b246f-71e7-4ef6-ac22-09a36b5c7420", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1103078229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ebf7b3bebe84e8f8a46532726f8935b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3c43958-e0", "ovs_interfaceid": "a3c43958-e0e7-442f-a5e9-48cdb50dcdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.736127] env[61573]: DEBUG oslo_concurrency.lockutils [req-c6824a5e-d4c3-4c09-881d-152ddfc40d2f req-ae4b1c4c-8e39-4938-8f75-1d8b8e956cd3 service nova] Releasing lock "refresh_cache-8d624aa8-c52f-4d3b-bb7e-fac412249b97" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.231235] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 
tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "4b512941-2180-44a7-a69d-b54e57856cb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.231651] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.404041] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.404041] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 1098.405109] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.405413] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 1098.418470] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 1098.418470] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.419305] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.403838] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.522062] env[61573]: WARNING oslo_vmware.rw_handles [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1102.522062] env[61573]: ERROR oslo_vmware.rw_handles [ 1102.522062] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1102.524086] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1102.524384] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Copying Virtual Disk [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/facbf504-e798-42e0-af9a-8fc0f6b6fdbd/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1102.524691] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45b802b1-66d0-4a43-8fdd-c0a928d2cf9a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.533467] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for the task: (returnval){ [ 1102.533467] env[61573]: value = "task-4836808" [ 1102.533467] env[61573]: _type = "Task" [ 1102.533467] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.541864] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Task: {'id': task-4836808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.045272] env[61573]: DEBUG oslo_vmware.exceptions [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1103.045620] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.046262] env[61573]: ERROR nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1103.046262] env[61573]: Faults: ['InvalidArgument'] [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Traceback (most recent call last): [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] yield resources [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self.driver.spawn(context, instance, image_meta, [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self._fetch_image_if_missing(context, vi) [ 1103.046262] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] image_cache(vi, tmp_image_ds_loc) [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] vm_util.copy_virtual_disk( [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] session._wait_for_task(vmdk_copy_task) [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return self.wait_for_task(task_ref) [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return evt.wait() [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] result = hub.switch() [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1103.046659] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return self.greenlet.switch() [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self.f(*self.args, **self.kw) [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] raise exceptions.translate_fault(task_info.error) [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Faults: ['InvalidArgument'] [ 1103.047061] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] [ 1103.047061] env[61573]: INFO nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Terminating instance [ 1103.051059] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1103.051298] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1103.051619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.051855] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.052763] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efb66b2-6310-4091-9973-9de6362e9c65 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.055459] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c837c29-dd35-4d06-8ccf-47f055557d86 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.062406] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1103.062731] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06b35c86-6e0a-48f6-99ef-89ddbab18074 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.065104] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.065316] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1103.066414] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-472050e8-463a-4248-bc81-95d50452764c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.071456] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1103.071456] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52e0e473-f713-c631-850f-a7c4161f4c20" [ 1103.071456] env[61573]: _type = "Task" [ 1103.071456] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.079063] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52e0e473-f713-c631-850f-a7c4161f4c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.134360] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1103.134574] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1103.134755] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Deleting the datastore file [datastore2] f63a7f37-09ac-4fe8-a1a3-7e13eb158526 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.135078] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c2ebbfa-7c04-4feb-a35e-3c3213cfdb9d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.141836] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for the task: (returnval){ [ 1103.141836] env[61573]: value = "task-4836810" [ 1103.141836] env[61573]: _type = "Task" [ 1103.141836] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.151210] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Task: {'id': task-4836810, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.404188] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.404369] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1103.581383] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1103.581751] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating directory with path [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.581918] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-830a31a8-9419-4751-89c4-ca237b465e66 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.593270] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created directory with path [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.593461] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Fetch image to [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1103.593664] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1103.594440] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dfce40-41d1-4a6a-97a8-1978ab3b700a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.601325] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-60afc883-fac6-42c6-9166-bf2425165d96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.610416] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8e4ad4-fe93-4fb7-8aaf-4d069808a66c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.641081] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522bca59-0e0d-4971-be0a-3fa4ce858cb0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.651876] env[61573]: DEBUG oslo_vmware.api [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Task: {'id': task-4836810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078894} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.653441] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.653663] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1103.653869] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1103.654064] env[61573]: INFO nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1103.656577] env[61573]: DEBUG nova.compute.claims [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1103.656765] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.657030] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.660148] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f7daed36-23cb-4828-ba3f-28836a06855d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.756263] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1103.784976] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1103.800618] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1103.800902] env[61573]: DEBUG nova.compute.provider_tree [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.813380] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1103.816171] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1103.872849] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1103.877235] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1103.877412] env[61573]: DEBUG oslo_vmware.rw_handles [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1104.162531] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64fb46f-3dee-4ca8-a92f-6b32eb9f9397 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.170962] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172512f1-b0a3-4586-b99b-c1662df03c6d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.202114] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860501fd-6b40-4ebc-a4f8-37bedd608c7a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.210211] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fad42ea-b108-45f6-9baa-9c206b9b9ab3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.223893] env[61573]: DEBUG nova.compute.provider_tree [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.232949] env[61573]: DEBUG nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1104.248685] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.592s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.249241] env[61573]: ERROR nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.249241] env[61573]: Faults: ['InvalidArgument'] [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Traceback (most recent call last): [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1104.249241] 
env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self.driver.spawn(context, instance, image_meta, [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self._fetch_image_if_missing(context, vi) [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] image_cache(vi, tmp_image_ds_loc) [ 1104.249241] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] vm_util.copy_virtual_disk( [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] session._wait_for_task(vmdk_copy_task) [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return self.wait_for_task(task_ref) [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return evt.wait() [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] result = hub.switch() [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] return self.greenlet.switch() [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1104.249680] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] self.f(*self.args, **self.kw) [ 1104.250130] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1104.250130] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] raise exceptions.translate_fault(task_info.error) [ 1104.250130] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.250130] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Faults: ['InvalidArgument'] [ 1104.250130] env[61573]: ERROR nova.compute.manager [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] [ 1104.250130] env[61573]: DEBUG nova.compute.utils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1104.251795] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Build of instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 was re-scheduled: A specified parameter was not correct: fileType [ 1104.251795] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1104.252230] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1104.252415] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1104.252588] env[61573]: DEBUG nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1104.252748] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1104.403252] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.414640] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.414899] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.415083] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.415246] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1104.416487] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ded7fb-56a8-4288-abbb-8521cf528315 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.425845] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d763ac2-bd95-4a67-9667-7c8a2ec0e321 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.443908] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca4818e-b242-4126-9d66-176863d6a56d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.449327] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fa1dc3-e59e-4df5-8c99-3cd4887e52a3 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.482064] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180547MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1104.482064] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.482064] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.584805] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.585181] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585181] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585181] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585306] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585392] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585508] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585625] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585738] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.585850] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1104.598695] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.611442] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.630239] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 404b990e-a6c8-4166-be41-e49d44269fc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.650857] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1eb74431-a48c-4427-97dc-a9ce4666605a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.657006] env[61573]: DEBUG nova.network.neutron [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.662891] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cd4fae54-ce8d-491c-9244-a32fd88e0183 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.671267] env[61573]: INFO nova.compute.manager [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Took 0.42 seconds to deallocate network for instance. [ 1104.676264] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4cd581ab-601a-4854-8b3a-5f368b40f2c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.687603] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c93aa98a-2c79-4ae3-ba66-7b2e4b67291b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.705062] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 7b28cf4f-5ba2-4fde-8c53-7a403166ae2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.716898] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 546a2a2f-fca3-410f-88c0-f71a820fd2bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.730441] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.742888] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.771849] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.785424] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1104.785684] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1104.785823] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '45', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '8', 'num_os_type_None': '9', 'num_proj_6179168f3e4f4df09875705a0ee778df': '2', 'io_workload': '9', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1', 'num_task_spawning': '1', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1104.807263] env[61573]: INFO nova.scheduler.client.report [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Deleted allocations for instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 [ 1104.834024] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b51752c0-1a99-445c-b0e4-cc2e5b5150fc tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 532.272s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.835610] env[61573]: DEBUG oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 333.915s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.835610] env[61573]: DEBUG oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Acquiring lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.835842] env[61573]: DEBUG oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.835842] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.838308] env[61573]: INFO nova.compute.manager [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Terminating instance [ 1104.840764] env[61573]: DEBUG nova.compute.manager [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1104.840963] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1104.841487] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a826816-1695-465d-97cb-f642106a186f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.846453] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1104.857313] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97055217-474f-4bd1-9186-50b934af5680 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.891860] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f63a7f37-09ac-4fe8-a1a3-7e13eb158526 could not be found. [ 1104.892148] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1104.892304] env[61573]: INFO nova.compute.manager [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1104.892622] env[61573]: DEBUG oslo.service.loopingcall [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1104.898080] env[61573]: DEBUG nova.compute.manager [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1104.898123] env[61573]: DEBUG nova.network.neutron [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1104.912705] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.926497] env[61573]: DEBUG nova.network.neutron [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.936455] env[61573]: INFO nova.compute.manager [-] [instance: f63a7f37-09ac-4fe8-a1a3-7e13eb158526] Took 0.04 seconds to deallocate network for instance. [ 1105.033494] env[61573]: DEBUG oslo_concurrency.lockutils [None req-63f9c364-e560-4ced-bd26-317be21f545e tempest-VolumesAdminNegativeTest-814286622 tempest-VolumesAdminNegativeTest-814286622-project-member] Lock "f63a7f37-09ac-4fe8-a1a3-7e13eb158526" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.136126] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed76f50-d05b-48a4-87e2-2c343e39c307 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.144311] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aca9ca8-1097-4e4f-b51e-aea9af63eeae {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.174377] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942e0ae0-399d-4765-bd1c-80dc2bbf3d6a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.182587] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0d88b3-9d04-42cd-b7d1-45afa94defcc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.196411] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
[ 1105.207019] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1105.221982] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1105.222240] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.741s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.222516] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.310s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.224137] env[61573]: INFO nova.compute.claims [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.550118] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b092d59-3663-498e-bf5e-17153a27865e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.558324] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdfcd5d-8afb-48ac-b6d6-a41301557690 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.587979] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad6fe2b-d129-463b-9b9e-859c8c7fc230 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.595810] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a04215-c721-41c8-ac47-4a2ca0fb4ae3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.609214] env[61573]: DEBUG nova.compute.provider_tree [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1105.618691] env[61573]: DEBUG nova.scheduler.client.report [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1105.655100] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.432s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.655652] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1105.695013] env[61573]: DEBUG nova.compute.utils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1105.696381] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1105.696585] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1105.706780] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1105.769394] env[61573]: DEBUG nova.policy [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f5bdeef45de42b5beb851921fd21c7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b194ec23978b411dbedfcb7095dbb743', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1105.778321] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1105.804148] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1105.804401] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1105.804558] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.804784] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1105.804942] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.805105] env[61573]: DEBUG nova.virt.hardware [None 
req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1105.805332] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1105.805493] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1105.805659] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1105.805826] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1105.806009] env[61573]: DEBUG nova.virt.hardware [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1105.806885] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad872c53-0b75-457d-bcd2-74d1b7b32028 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.815802] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c0480a-a61e-4fa0-9edb-42a3e881407d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.199651] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Successfully created port: 589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1106.222485] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.224749] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.404336] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.404481] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1106.404518] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1106.430608] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.430775] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.430907] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.431262] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.431502] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.431686] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.431861] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.432041] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.432222] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.432392] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1106.432569] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1106.433173] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.433428] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.433697] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.868379] env[61573]: DEBUG nova.compute.manager [req-39d8d75c-2519-4e72-8222-d5e83822cf30 req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Received event network-vif-plugged-589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1106.868665] env[61573]: DEBUG oslo_concurrency.lockutils [req-39d8d75c-2519-4e72-8222-d5e83822cf30 req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] Acquiring lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.868802] env[61573]: DEBUG oslo_concurrency.lockutils [req-39d8d75c-2519-4e72-8222-d5e83822cf30 req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.868971] env[61573]: DEBUG oslo_concurrency.lockutils [req-39d8d75c-2519-4e72-8222-d5e83822cf30 req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.869153] env[61573]: DEBUG nova.compute.manager [req-39d8d75c-2519-4e72-8222-d5e83822cf30 
req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] No waiting events found dispatching network-vif-plugged-589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1106.869321] env[61573]: WARNING nova.compute.manager [req-39d8d75c-2519-4e72-8222-d5e83822cf30 req-a3684765-54d9-4af4-b114-95049ec54ce0 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Received unexpected event network-vif-plugged-589a8658-1e43-490e-8fc2-d2209bae8cb1 for instance with vm_state building and task_state spawning. [ 1106.951677] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Successfully updated port: 589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.970586] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.970746] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquired lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.970901] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1107.035964] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1107.287173] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Updating instance_info_cache with network_info: [{"id": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "address": "fa:16:3e:c5:66:69", "network": {"id": "40a7a989-f712-4c85-b4d1-e3797e478b64", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1016685578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b194ec23978b411dbedfcb7095dbb743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589a8658-1e", "ovs_interfaceid": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.301791] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Releasing lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.302139] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance network_info: |[{"id": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "address": "fa:16:3e:c5:66:69", "network": {"id": "40a7a989-f712-4c85-b4d1-e3797e478b64", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1016685578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b194ec23978b411dbedfcb7095dbb743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589a8658-1e", "ovs_interfaceid": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1107.302572] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:66:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '589a8658-1e43-490e-8fc2-d2209bae8cb1', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.310883] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Creating folder: Project (b194ec23978b411dbedfcb7095dbb743). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.311605] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a8da6b6-7dc9-4a35-a83e-d85bfec94000 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.324214] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Created folder: Project (b194ec23978b411dbedfcb7095dbb743) in parent group-v942801. [ 1107.324439] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Creating folder: Instances. Parent ref: group-v942864. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.324696] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4f196d0-1c4a-42c0-acd9-4211958f1aad {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.334693] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Created folder: Instances in parent group-v942864. [ 1107.334920] env[61573]: DEBUG oslo.service.loopingcall [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.335127] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1107.335339] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71387ad6-4bfb-4fcd-b7a7-ed1d12a025aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.354410] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.354410] env[61573]: value = "task-4836813" [ 1107.354410] env[61573]: _type = "Task" [ 1107.354410] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.362906] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836813, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.865266] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836813, 'name': CreateVM_Task, 'duration_secs': 0.307764} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.865451] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1107.866244] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.866449] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.866796] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1107.867101] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78343f81-e145-49da-b06d-8b920025ee7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.872263] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for the task: (returnval){ [ 1107.872263] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5266d63c-944a-18d1-625e-ebab4ab196d5" [ 1107.872263] env[61573]: _type = "Task" [ 1107.872263] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.881037] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5266d63c-944a-18d1-625e-ebab4ab196d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.384145] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.385148] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.385148] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.950528] env[61573]: DEBUG nova.compute.manager [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Received event network-changed-589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1108.950779] env[61573]: DEBUG nova.compute.manager [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Refreshing instance network info cache due to event network-changed-589a8658-1e43-490e-8fc2-d2209bae8cb1. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1108.950940] env[61573]: DEBUG oslo_concurrency.lockutils [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] Acquiring lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.951097] env[61573]: DEBUG oslo_concurrency.lockutils [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] Acquired lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.951275] env[61573]: DEBUG nova.network.neutron [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Refreshing network info cache for port 589a8658-1e43-490e-8fc2-d2209bae8cb1 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1109.590471] env[61573]: DEBUG nova.network.neutron [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Updated VIF entry in instance network info cache for port 589a8658-1e43-490e-8fc2-d2209bae8cb1. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.590837] env[61573]: DEBUG nova.network.neutron [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Updating instance_info_cache with network_info: [{"id": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "address": "fa:16:3e:c5:66:69", "network": {"id": "40a7a989-f712-4c85-b4d1-e3797e478b64", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1016685578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b194ec23978b411dbedfcb7095dbb743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589a8658-1e", "ovs_interfaceid": "589a8658-1e43-490e-8fc2-d2209bae8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.603967] env[61573]: DEBUG oslo_concurrency.lockutils [req-656f006d-b899-45f7-a7e3-a1c06249f34e req-63e98b20-b04c-417c-9d77-592db8629374 service nova] Releasing lock "refresh_cache-8619fddd-ad89-42b3-95c5-55def25b6df2" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.666995] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.697522] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 1133.697522] env[61573]: value = "domain-c8" [ 1133.697522] env[61573]: _type = "ClusterComputeResource" [ 1133.697522] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1133.699339] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b5bd68-bd03-4503-8e28-ff97c39e83eb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.718038] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 10 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1133.718038] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718236] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 {{(pid=61573) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718396] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 8c57d493-12c4-47fe-a355-c9ade98b7158 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718550] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 338e2879-7dbe-4334-80da-4bbc1a071aa8 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718699] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 0d91230d-849f-4e64-8685-5298ee5ea5b1 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718848] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 36a5ff6d-5123-4323-8e86-3529828af0ab {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.718994] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid ce7d9bf8-55ad-4bbc-a139-55ff85cda08f {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.719155] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 54860ec5-a1ff-4d7d-ae70-769f8fad731b {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.719302] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 8d624aa8-c52f-4d3b-bb7e-fac412249b97 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.719443] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 8619fddd-ad89-42b3-95c5-55def25b6df2 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1133.719778] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.720013] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.720233] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "8c57d493-12c4-47fe-a355-c9ade98b7158" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.720425] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.720633] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.720822] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "36a5ff6d-5123-4323-8e86-3529828af0ab" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.721014] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.721211] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.721417] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.721644] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "8619fddd-ad89-42b3-95c5-55def25b6df2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.660284] env[61573]: WARNING oslo_vmware.rw_handles [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", 
line 287, in _read_status [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1149.660284] env[61573]: ERROR oslo_vmware.rw_handles [ 1149.661230] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1149.662807] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1149.663075] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/0a98e3b6-f2bc-4b2e-a335-00f35949779c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1149.663405] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-191ab3f8-3908-46d5-9dfd-45a49c14a1b4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.672471] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1149.672471] env[61573]: value = "task-4836814" [ 1149.672471] env[61573]: _type = "Task" [ 1149.672471] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.681124] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.183121] env[61573]: DEBUG oslo_vmware.exceptions [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1150.183424] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.183995] env[61573]: ERROR nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1150.183995] env[61573]: Faults: ['InvalidArgument'] [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Traceback (most recent call last): [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] yield resources [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self.driver.spawn(context, instance, image_meta, [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self._fetch_image_if_missing(context, vi) [ 1150.183995] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] image_cache(vi, tmp_image_ds_loc) [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] vm_util.copy_virtual_disk( [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] session._wait_for_task(vmdk_copy_task) [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return self.wait_for_task(task_ref) [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return evt.wait() [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] result = hub.switch() [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1150.184400] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return self.greenlet.switch() [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self.f(*self.args, **self.kw) [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] raise exceptions.translate_fault(task_info.error) [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Faults: ['InvalidArgument'] [ 1150.184809] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] [ 1150.184809] env[61573]: INFO nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Terminating instance [ 1150.186026] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.186236] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.187035] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-8cac1044-642a-4d21-babd-1a9d853725aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.189018] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1150.189219] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1150.189944] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4565ea5f-bf37-49f0-a927-92698cd5a44f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.196960] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1150.197212] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef4b42ff-77d2-4f9a-b507-1c1b28d024b1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.199502] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.199640] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1150.201061] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a2088c-82b7-42fc-baa0-4c5b1f1841e8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.208015] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1150.208015] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52ed9dde-c131-5fc7-b763-e5f49b5272d3" [ 1150.208015] env[61573]: _type = "Task" [ 1150.208015] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.213036] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52ed9dde-c131-5fc7-b763-e5f49b5272d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.276813] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1150.277104] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1150.277293] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleting the datastore file [datastore2] f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1150.277598] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1e1d255-570c-4554-b7a0-9beb0fbb4c6d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.284922] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1150.284922] env[61573]: value = "task-4836816" [ 1150.284922] env[61573]: _type = "Task" [ 1150.284922] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.294424] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836816, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.717462] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1150.717462] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating directory with path [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.717856] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b8cb978-33d4-4551-bae8-2156328b7742 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.730984] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Created directory with path [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.731252] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Fetch image to [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1150.731445] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1150.732259] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dd4dd9-1f39-4ba5-8882-8f52be500773 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.740146] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfeb197-b34b-4c42-94c4-258141179b62 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.749968] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096f0cc2-fb25-4432-b54c-667a7844bb95 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.781526] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6dc61098-df87-4f1b-af84-4d936cd5562e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.790683] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-580ea2fb-26dd-4ad2-bea5-b631f66afbce {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.795223] env[61573]: DEBUG oslo_vmware.api [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079692} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.796035] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.796162] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1150.796450] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.796530] env[61573]: INFO nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Took 0.61 seconds to destroy the instance on the hypervisor. 
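The entries above repeat a pattern: a per-image lock named after the datastore cache path ("[datastore2] devstack-image-cache_base/<image-id>") is taken before the cached VMDK is checked and, if missing, the image is fetched into a vmware_temp directory. A minimal, illustrative sketch of that guard using oslo.concurrency follows; the lock name format mirrors the log, while is_cached() and fetch_image() are hypothetical helpers, not the actual nova code paths.

    # Illustrative sketch only: serializing an image-cache fetch with a named lock,
    # mirroring the "Acquiring/Releasing lock [datastore2] devstack-image-cache_base/..."
    # entries above. is_cached() and fetch_image() are hypothetical placeholders.
    from oslo_concurrency import lockutils

    def ensure_image_cached(image_id, datastore="datastore2"):
        lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        with lockutils.lock(lock_name):
            if not is_cached(image_id, datastore):   # hypothetical cache check
                fetch_image(image_id, datastore)     # hypothetical download/copy

The named lock keeps concurrent spawns from downloading the same image twice into the shared cache directory, which is why the log shows one request holding the lock while the task that populates the cache runs.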
[ 1150.798664] env[61573]: DEBUG nova.compute.claims [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1150.798843] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.799093] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.816558] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1150.871762] env[61573]: DEBUG oslo_vmware.rw_handles [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1150.932990] env[61573]: DEBUG oslo_vmware.rw_handles [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1150.932990] env[61573]: DEBUG oslo_vmware.rw_handles [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1151.182140] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa56bcfd-0c9e-4a72-a2a8-c85e6ac8c3ea {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.190528] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f6eb8c-c0bb-4f67-a3d2-90cf61d513c2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.221198] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cf3257-06ce-499a-966e-84010267611b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.229146] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693d34c0-37cd-4547-bd39-33f0b3bde427 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.243046] env[61573]: DEBUG nova.compute.provider_tree [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.251941] env[61573]: DEBUG nova.scheduler.client.report [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1151.266818] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.468s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.267382] env[61573]: ERROR nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1151.267382] env[61573]: Faults: ['InvalidArgument'] [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Traceback (most recent call last): [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self.driver.spawn(context, instance, image_meta, [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self._fetch_image_if_missing(context, vi) [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] image_cache(vi, tmp_image_ds_loc) [ 1151.267382] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] vm_util.copy_virtual_disk( [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] session._wait_for_task(vmdk_copy_task) [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return self.wait_for_task(task_ref) [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return evt.wait() [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] result = hub.switch() [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] return self.greenlet.switch() [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1151.267806] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] self.f(*self.args, **self.kw) [ 1151.268224] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1151.268224] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] raise exceptions.translate_fault(task_info.error) [ 1151.268224] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1151.268224] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Faults: ['InvalidArgument'] [ 1151.268224] env[61573]: ERROR nova.compute.manager [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] [ 1151.268224] env[61573]: DEBUG nova.compute.utils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1151.269672] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Build of instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 was re-scheduled: A specified parameter was not correct: fileType [ 1151.269672] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1151.270050] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1151.270225] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1151.270393] env[61573]: DEBUG nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1151.270557] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1151.580419] env[61573]: DEBUG nova.network.neutron [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.609914] env[61573]: INFO nova.compute.manager [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Took 0.34 seconds to deallocate network for instance. [ 1151.717628] env[61573]: INFO nova.scheduler.client.report [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleted allocations for instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 [ 1151.742088] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5ec34f83-5acf-406f-bdad-803420e54a9a tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.263s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.743351] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 175.409s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.743991] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.744245] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.744454] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.746622] env[61573]: INFO nova.compute.manager [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Terminating instance [ 1151.749433] env[61573]: DEBUG nova.compute.manager [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1151.749744] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1151.749959] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a654b98c-bd8d-42dd-8df8-38996446d872 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.754842] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1151.762021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cb4371-5dab-4a1c-b3d9-f5a33bb1473c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.505813] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f54d8d6e-4ecc-498e-aca0-4209fbf3ce04 could not be found. 
[ 1152.506016] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1152.506164] env[61573]: INFO nova.compute.manager [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Took 0.76 seconds to destroy the instance on the hypervisor. [ 1152.506422] env[61573]: DEBUG oslo.service.loopingcall [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.509359] env[61573]: DEBUG nova.compute.manager [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1152.509494] env[61573]: DEBUG nova.network.neutron [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1152.528892] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.529165] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.530651] env[61573]: INFO nova.compute.claims [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.544339] env[61573]: DEBUG nova.network.neutron [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.559056] env[61573]: INFO nova.compute.manager [-] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] Took 0.05 seconds to deallocate network for instance. 
[ 1152.674705] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3bfd05a0-2730-4ed4-b036-55dd03335e2c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.931s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.675472] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.956s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.675752] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: f54d8d6e-4ecc-498e-aca0-4209fbf3ce04] During sync_power_state the instance has a pending task (deleting). Skip. [ 1152.676406] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "f54d8d6e-4ecc-498e-aca0-4209fbf3ce04" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.861843] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afb60a6-62b9-4fcc-9332-09c8b6e6067d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.870016] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ece604-dace-400c-bf2b-2b4d40848da8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.899567] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114b266d-61d1-4ef9-90ce-136b6830be36 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.907359] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162c1580-e36d-4338-b8ee-ae0a04b0a0a9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.921649] env[61573]: DEBUG nova.compute.provider_tree [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.930025] env[61573]: DEBUG nova.scheduler.client.report [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 
0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1152.946368] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.417s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.946884] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1152.980987] env[61573]: DEBUG nova.compute.utils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1152.982533] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1152.982701] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1152.995964] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1153.071575] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1153.085432] env[61573]: DEBUG nova.policy [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5057555bfd4a486b9f4803b0e93dca8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a7a73ba2f74211829146bc750ec0aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1153.096546] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1153.096785] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1153.096938] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.097129] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1153.097273] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.097415] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1153.097617] env[61573]: DEBUG nova.virt.hardware [None 
req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1153.097777] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1153.097939] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1153.098124] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1153.098302] env[61573]: DEBUG nova.virt.hardware [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1153.099137] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326a5318-3c46-4bdb-b843-aff3252f0758 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.107092] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e1aae9-297d-43b6-a55a-e31cd91624ff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.485105] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Successfully created port: 928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1154.224678] env[61573]: DEBUG nova.compute.manager [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Received event network-vif-plugged-928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1154.224678] env[61573]: DEBUG oslo_concurrency.lockutils [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] Acquiring lock "b1392e68-7dc9-4399-88a6-9463e06917b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.224885] env[61573]: DEBUG oslo_concurrency.lockutils [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] Lock 
"b1392e68-7dc9-4399-88a6-9463e06917b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.224963] env[61573]: DEBUG oslo_concurrency.lockutils [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.225150] env[61573]: DEBUG nova.compute.manager [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] No waiting events found dispatching network-vif-plugged-928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1154.225314] env[61573]: WARNING nova.compute.manager [req-ad522f69-577b-4350-bc4f-289f94c66218 req-52a8b797-8e3e-42b9-8f6e-d210e9f4af0f service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Received unexpected event network-vif-plugged-928745b7-a311-4019-9f9f-afb3895c4d43 for instance with vm_state building and task_state spawning. [ 1154.559502] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Successfully updated port: 928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1154.578311] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.578311] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquired lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.578311] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1154.673670] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1155.013713] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Updating instance_info_cache with network_info: [{"id": "928745b7-a311-4019-9f9f-afb3895c4d43", "address": "fa:16:3e:ba:d8:4b", "network": {"id": "b61b22dc-8025-4027-ace6-b74bbc74414e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-911100697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a7a73ba2f74211829146bc750ec0aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap928745b7-a3", "ovs_interfaceid": "928745b7-a311-4019-9f9f-afb3895c4d43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.035607] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Releasing lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.035842] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance network_info: |[{"id": "928745b7-a311-4019-9f9f-afb3895c4d43", "address": "fa:16:3e:ba:d8:4b", "network": {"id": "b61b22dc-8025-4027-ace6-b74bbc74414e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-911100697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a7a73ba2f74211829146bc750ec0aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap928745b7-a3", "ovs_interfaceid": "928745b7-a311-4019-9f9f-afb3895c4d43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1155.036274] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:d8:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '928745b7-a311-4019-9f9f-afb3895c4d43', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.047528] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Creating folder: Project (38a7a73ba2f74211829146bc750ec0aa). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1155.047528] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a873b0f-d9e0-4c15-b9b8-d58ac8c64163 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.060622] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Created folder: Project (38a7a73ba2f74211829146bc750ec0aa) in parent group-v942801. [ 1155.060622] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Creating folder: Instances. Parent ref: group-v942867. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1155.060622] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7b4aeb2-2b1c-4915-852c-a8a6e88836bd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.071687] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Created folder: Instances in parent group-v942867. [ 1155.071949] env[61573]: DEBUG oslo.service.loopingcall [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1155.072153] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1155.072375] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-163bd5be-59e7-4b83-99b2-9a2e1a4ca180 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.093019] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.093019] env[61573]: value = "task-4836819" [ 1155.093019] env[61573]: _type = "Task" [ 1155.093019] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.101265] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836819, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.604148] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836819, 'name': CreateVM_Task, 'duration_secs': 0.312451} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.604148] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1155.615115] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.615115] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.615115] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1155.615115] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b70860da-7f37-4d73-9dca-82662d15391c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.619494] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for the task: (returnval){ [ 1155.619494] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c14a46-d821-d172-e3aa-91366977ee79" [ 1155.619494] env[61573]: _type = "Task" [ 1155.619494] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.631417] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c14a46-d821-d172-e3aa-91366977ee79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.136496] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.136850] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1156.138304] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.264227] env[61573]: DEBUG nova.compute.manager [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Received event network-changed-928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1156.264929] env[61573]: DEBUG nova.compute.manager [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Refreshing instance network info cache due to event network-changed-928745b7-a311-4019-9f9f-afb3895c4d43. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1156.264929] env[61573]: DEBUG oslo_concurrency.lockutils [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] Acquiring lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.264929] env[61573]: DEBUG oslo_concurrency.lockutils [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] Acquired lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.265091] env[61573]: DEBUG nova.network.neutron [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Refreshing network info cache for port 928745b7-a311-4019-9f9f-afb3895c4d43 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1156.589094] env[61573]: DEBUG nova.network.neutron [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Updated VIF entry in instance network info cache for port 928745b7-a311-4019-9f9f-afb3895c4d43. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1156.590348] env[61573]: DEBUG nova.network.neutron [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Updating instance_info_cache with network_info: [{"id": "928745b7-a311-4019-9f9f-afb3895c4d43", "address": "fa:16:3e:ba:d8:4b", "network": {"id": "b61b22dc-8025-4027-ace6-b74bbc74414e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-911100697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a7a73ba2f74211829146bc750ec0aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap928745b7-a3", "ovs_interfaceid": "928745b7-a311-4019-9f9f-afb3895c4d43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.599913] env[61573]: DEBUG oslo_concurrency.lockutils [req-8306610a-923d-426a-98ac-b63e5953e135 req-23baa68b-f7bf-4d8b-b946-335f40e3420e service nova] Releasing lock "refresh_cache-b1392e68-7dc9-4399-88a6-9463e06917b4" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.636476] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.636803] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.682180] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.682445] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 
tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.459625] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.036371] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.403956] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.403956] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1165.641025] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "8619fddd-ad89-42b3-95c5-55def25b6df2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.399574] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.403315] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.404030] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.404030] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.404030] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.415513] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.415513] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.415693] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.415814] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1166.416953] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a7fd21-0f40-4923-a241-19cdeb8a6a3d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.427625] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57602c32-c100-4a0e-b9a6-ee471594d06a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.441951] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ca7987-2a91-448c-8891-4b5fbccae659 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.449286] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c3cc56-1cc5-4d12-9808-010ddcba27ac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.477989] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180564MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1166.478276] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.478360] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.565496] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.565657] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.565806] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.565936] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566068] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566188] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566352] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566553] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566694] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.566812] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1166.578739] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 404b990e-a6c8-4166-be41-e49d44269fc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.589738] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1eb74431-a48c-4427-97dc-a9ce4666605a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.600064] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cd4fae54-ce8d-491c-9244-a32fd88e0183 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.610959] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4cd581ab-601a-4854-8b3a-5f368b40f2c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.621476] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c93aa98a-2c79-4ae3-ba66-7b2e4b67291b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.631534] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 7b28cf4f-5ba2-4fde-8c53-7a403166ae2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.643272] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 546a2a2f-fca3-410f-88c0-f71a820fd2bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.655707] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.669313] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.680957] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.692320] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.704030] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.714511] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1166.714877] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1166.715088] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '47', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_6179168f3e4f4df09875705a0ee778df': '1', 'io_workload': '10', 'num_proj_58b3da30203f416e9b30c28a291d7b4e': '1', 'num_proj_415cc2d7de384f7ca1b8c26e32974978': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'num_task_spawning': '1', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1167.010434] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dc0118-1645-4b2a-a2ea-d4cccc9615f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.018724] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580df617-c801-4bfe-97fd-3e9146352c0a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.047943] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335d358c-1015-4cfd-aa11-bb8a809af025 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.055333] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28041830-7a09-4bbd-9688-8b582fb2b2c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.068313] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.077261] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.093312] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1167.093494] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.615s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.093536] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.404210] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.404387] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1168.404526] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1168.427845] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428123] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428204] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428275] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428380] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428499] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428617] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428766] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.428927] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.429024] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1168.429148] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
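The provider inventory echoed a few entries above fixes what the scheduler can place against this node. A short plain-Python sketch, assuming Placement's usual capacity formula of (total - reserved) * allocation_ratio with max_unit capping any single allocation, reproduces the headline numbers from that dict:

# Schedulable capacity implied by the inventory logged above, assuming the
# standard Placement formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 96},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print("%s: capacity=%s, largest single allocation=%s"
          % (rc, capacity, inv['max_unit']))
# Prints VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 200.0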
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1172.179435] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "b1392e68-7dc9-4399-88a6-9463e06917b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.841967] env[61573]: DEBUG oslo_concurrency.lockutils [None req-396dee37-420e-47da-8c75-dfbd5bcede63 tempest-ServerDiagnosticsNegativeTest-341467014 tempest-ServerDiagnosticsNegativeTest-341467014-project-member] Acquiring lock "92145176-a567-4098-a9d4-f74a9316e38c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.842594] env[61573]: DEBUG oslo_concurrency.lockutils [None req-396dee37-420e-47da-8c75-dfbd5bcede63 tempest-ServerDiagnosticsNegativeTest-341467014 tempest-ServerDiagnosticsNegativeTest-341467014-project-member] Lock "92145176-a567-4098-a9d4-f74a9316e38c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.411454] env[61573]: DEBUG oslo_concurrency.lockutils [None req-71c1c702-cab9-452e-bb8b-6ff6963c1815 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "1316abcb-b4b8-4e7d-858e-55a4db29b429" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.411852] env[61573]: DEBUG oslo_concurrency.lockutils [None req-71c1c702-cab9-452e-bb8b-6ff6963c1815 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "1316abcb-b4b8-4e7d-858e-55a4db29b429" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.309276] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "3e075864-6503-4d83-bbd4-f0bec8104e03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.309564] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.680138] env[61573]: WARNING oslo_vmware.rw_handles [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f 
tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1199.680138] env[61573]: ERROR oslo_vmware.rw_handles [ 1199.681052] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1199.687518] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1199.689110] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Copying Virtual Disk [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/ffb6fa53-d483-4603-83ed-41f1e5caa781/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1199.689362] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4414249-5542-484e-a4ba-55b83e906734 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.699148] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1199.699148] env[61573]: value = "task-4836820" [ 1199.699148] env[61573]: _type = "Task" [ 1199.699148] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.709581] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836820, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.216486] env[61573]: DEBUG oslo_vmware.exceptions [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1200.217474] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.217474] env[61573]: ERROR nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1200.217474] env[61573]: Faults: ['InvalidArgument'] [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Traceback (most recent call last): [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] yield resources [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self.driver.spawn(context, instance, image_meta, [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1200.217474] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self._fetch_image_if_missing(context, vi) [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] image_cache(vi, tmp_image_ds_loc) [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 
277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] vm_util.copy_virtual_disk( [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] session._wait_for_task(vmdk_copy_task) [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return self.wait_for_task(task_ref) [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return evt.wait() [ 1200.218469] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] result = hub.switch() [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return self.greenlet.switch() [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self.f(*self.args, **self.kw) [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] raise exceptions.translate_fault(task_info.error) [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Faults: ['InvalidArgument'] [ 1200.218920] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] [ 1200.218920] env[61573]: INFO nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Terminating instance [ 1200.219292] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.219509] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.219753] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e109723d-1c83-4a6d-b94a-ca2bf9a706cd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.222019] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1200.222292] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.223032] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f59c67-fc2c-4bfe-b8d1-85fb04fd255d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.232601] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1200.232855] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddf680c9-6be2-4604-ac15-09b776bf4e58 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.239030] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.239030] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1200.239030] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4772c664-a0b7-4cd7-b28a-dc1a8afc10ef {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.241926] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Waiting for the task: (returnval){ [ 1200.241926] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52831b24-a659-5fc6-af1e-49311b1968c8" [ 1200.241926] env[61573]: _type = "Task" [ 1200.241926] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.249813] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52831b24-a659-5fc6-af1e-49311b1968c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.317035] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1200.317035] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1200.317035] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleting the datastore file [datastore2] 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1200.317035] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d7c1388-f266-4c2b-98fe-8baea5fd1dfd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.324244] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for the task: (returnval){ [ 1200.324244] env[61573]: value = "task-4836822" [ 1200.324244] env[61573]: _type = "Task" [ 1200.324244] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.333980] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
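The MakeDirectory, SearchDatastore_Task and DeleteDatastoreFile_Task calls above, together with the "Waiting for the task ... to complete" / "progress is 0%" lines, follow oslo.vmware's invoke-then-poll pattern. A minimal sketch, assuming an already-created oslo_vmware.api.VMwareAPISession and a datacenter managed-object reference (`session` and `dc_ref` are placeholders, and the helper name is illustrative):

# Sketch of the invoke-then-poll pattern behind the log lines above.
def delete_datastore_file(session, dc_ref, ds_path):
    file_manager = session.vim.service_content.fileManager
    # Kick off an asynchronous vCenter task (here DeleteDatastoreFile_Task)...
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    # ...then block while oslo.vmware polls the task, logging its progress,
    # and raise a translated fault (e.g. VimFaultException) if it fails.
    session.wait_for_task(task)

# Example path shaped after the log (placeholder value):
# delete_datastore_file(session, dc_ref,
#                       '[datastore2] 277ddab5-2fef-4c64-ab26-22f1be2ca4f8')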
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.753382] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1200.753693] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Creating directory with path [datastore2] vmware_temp/9377ff3b-951a-41db-a6e2-e2e5ead43937/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.753998] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f01d721-7735-455f-a11d-9c43a5363ee3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.766345] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Created directory with path [datastore2] vmware_temp/9377ff3b-951a-41db-a6e2-e2e5ead43937/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.766554] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Fetch image to [datastore2] vmware_temp/9377ff3b-951a-41db-a6e2-e2e5ead43937/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1200.766727] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/9377ff3b-951a-41db-a6e2-e2e5ead43937/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1200.767534] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4794202-a0c1-4c88-bb0f-dd021c648853 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.775185] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ffe7c9-1758-424c-9145-07d535263ea6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.785021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb989136-4158-4fb5-80a3-a25a13d906c4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.816639] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe091df6-dbca-48ea-8d36-3b5785e662ab {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.824555] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-efe35900-d8be-4074-a897-4da359bb95cd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.834914] env[61573]: DEBUG oslo_vmware.api [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Task: {'id': task-4836822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070618} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.835336] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.835572] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1200.835836] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1200.836155] env[61573]: INFO nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Took 0.61 seconds to destroy the instance on the hypervisor. 
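The "Acquiring lock ... by ... / Lock ... acquired ... waited / Lock ... released ... held" triplets throughout this trace (for "compute_resources", the image-cache vmdk path, and the per-instance build locks) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch that produces the same kind of records, assuming nothing beyond the public lockutils API; the function body is a stand-in for the real critical sections:

import logging
from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

# Decorator form: serialises callers on the named lock and logs the
# acquire/waited/held lines seen in this log.
@lockutils.synchronized('compute_resources')
def update_available_resource():
    pass  # critical section

# Context-manager form, e.g. for the per-image cache lock:
with lockutils.lock('[datastore2] devstack-image-cache_base/896f953b.vmdk'):
    pass  # critical section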
[ 1200.838322] env[61573]: DEBUG nova.compute.claims [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1200.838557] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.838859] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.853313] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1201.060402] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.061625] env[61573]: ERROR nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.061625] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1201.062072] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] yield resources [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.driver.spawn(context, instance, image_meta, [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._fetch_image_if_missing(context, vi) [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image_fetch(context, vi, tmp_image_ds_loc) [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] images.fetch_image( [ 1201.062439] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] metadata = IMAGE_API.get(context, image_ref) [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return session.show(context, image_id, [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] _reraise_translated_image_exception(image_id) [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise new_exc.with_traceback(exc_trace) [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.062833] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1201.063265] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1201.063649] env[61573]: INFO nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Terminating instance [ 1201.063999] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.064231] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.067446] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1201.067639] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1201.067893] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9ab8920-ca1d-43fb-81f8-0b1b5648a79d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.070633] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1ab6c6-d871-4e51-abe0-d72c41016347 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.078272] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1201.078536] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48283c9f-88e5-4f65-b5cc-9499018dbd61 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.080946] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.081155] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 
tempest-DeleteServersAdminTestJSON-303498553-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1201.082079] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d0d2506-06f1-449e-a6e6-bfe34d56449f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.093026] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 1201.093026] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52da5f16-ea01-9633-e4ea-309e0ca71357" [ 1201.093026] env[61573]: _type = "Task" [ 1201.093026] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.098310] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52da5f16-ea01-9633-e4ea-309e0ca71357, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.161173] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1201.161442] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1201.161632] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Deleting the datastore file [datastore2] 8c57d493-12c4-47fe-a355-c9ade98b7158 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.164727] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-638e4cd6-df48-4e8d-9a50-70c1da5b346c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.172693] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Waiting for the task: (returnval){ [ 1201.172693] env[61573]: value = "task-4836824" [ 1201.172693] env[61573]: _type = "Task" [ 1201.172693] env[61573]: } to complete. 
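The ImageNotAuthorized failure being cleaned up here began as an HTTP 401 from glanceclient, which nova's glance wrapper re-raises as its own exception type (the traceback above shows _reraise_translated_image_exception doing this). A minimal sketch of that translation using only the exception classes visible in the traceback; the helper name and the `from` chaining are illustrative, not Nova's exact code:

from glanceclient import exc as glance_exc
from nova import exception

def translate_image_error(image_id, error):
    """Illustrative re-raise: a glanceclient 401 becomes ImageNotAuthorized."""
    if isinstance(error, glance_exc.HTTPUnauthorized):
        raise exception.ImageNotAuthorized(image_id=image_id) from error
    raise error

# Usage sketch (client is a hypothetical glance client instance):
# try:
#     image = client.images.get(image_id)
# except glance_exc.HTTPUnauthorized as e:
#     translate_image_error(image_id, e)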
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.187556] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Task: {'id': task-4836824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.213859] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f8cdae-d029-4513-8a13-00b72e7afd82 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.221534] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a2812d-093e-474f-af8c-7b98348cfe9a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.253551] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1eeb162-461d-487f-9101-3e8279f9404e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.261647] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab47d0c7-6a36-42ab-8076-63b5b5360e1f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.276285] env[61573]: DEBUG nova.compute.provider_tree [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.288175] env[61573]: DEBUG nova.scheduler.client.report [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1201.305555] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.466s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.306235] env[61573]: ERROR nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1201.306235] 
env[61573]: Faults: ['InvalidArgument'] [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Traceback (most recent call last): [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self.driver.spawn(context, instance, image_meta, [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self._fetch_image_if_missing(context, vi) [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] image_cache(vi, tmp_image_ds_loc) [ 1201.306235] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] vm_util.copy_virtual_disk( [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] session._wait_for_task(vmdk_copy_task) [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return self.wait_for_task(task_ref) [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return evt.wait() [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] result = hub.switch() [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] return self.greenlet.switch() [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 
277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1201.306689] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] self.f(*self.args, **self.kw) [ 1201.307101] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1201.307101] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] raise exceptions.translate_fault(task_info.error) [ 1201.307101] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1201.307101] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Faults: ['InvalidArgument'] [ 1201.307101] env[61573]: ERROR nova.compute.manager [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] [ 1201.307535] env[61573]: DEBUG nova.compute.utils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1201.309233] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Build of instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 was re-scheduled: A specified parameter was not correct: fileType [ 1201.309233] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1201.309746] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1201.310008] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1201.310258] env[61573]: DEBUG nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1201.310483] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1201.600960] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1201.601256] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Creating directory with path [datastore2] vmware_temp/65992aa6-565d-4b57-b0e8-5b38f314ffee/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.601494] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b737711-420f-41e6-a70c-723e7d6c64db {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.614494] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Created directory with path [datastore2] vmware_temp/65992aa6-565d-4b57-b0e8-5b38f314ffee/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.614764] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Fetch image to [datastore2] vmware_temp/65992aa6-565d-4b57-b0e8-5b38f314ffee/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1201.614985] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/65992aa6-565d-4b57-b0e8-5b38f314ffee/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1201.615885] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716dd028-d9b0-4b2c-8102-4042e188918b {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.623437] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fea497b-843a-43be-ba44-355f2a4b58a9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.638187] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240e66f9-96c7-48d1-9c69-de3475a9f035 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.689275] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfd44d7-06bd-4ff4-8c0d-2d60633aa98b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.700464] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-60ce92a0-fefc-403f-ad8f-17203aab1029 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.702846] env[61573]: DEBUG oslo_vmware.api [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Task: {'id': task-4836824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076397} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.703434] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.703685] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1201.703801] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1201.703980] env[61573]: INFO nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Took 0.64 seconds to destroy the instance on the hypervisor. 
[ 1201.706292] env[61573]: DEBUG nova.compute.claims [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1201.706476] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.706695] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.726886] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1201.861579] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.861579] env[61573]: ERROR nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.861579] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1201.861985] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] yield resources [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.driver.spawn(context, instance, image_meta, [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._fetch_image_if_missing(context, vi) [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1201.862484] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image_fetch(context, vi, tmp_image_ds_loc) [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] images.fetch_image( [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] metadata = IMAGE_API.get(context, image_ref) [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return session.show(context, image_id, [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] _reraise_translated_image_exception(image_id) [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise new_exc.with_traceback(exc_trace) [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.863139] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1201.863661] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.864066] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1201.864066] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1201.864066] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1201.864066] env[61573]: INFO nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Terminating instance [ 1201.864066] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.864066] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.864066] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1201.864278] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1201.864278] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9b221c6-3be1-4084-a5cf-e1a595fd17d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.867857] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5e5e74-86b5-470f-ba22-5b5e58b614e7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.878653] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1201.879023] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09f94e13-f8d9-4ca0-9e30-c5b2f9fa371a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.882062] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.882342] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e 
tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1201.886611] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9d796b6-ea84-4988-a3ba-3504f4a882fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.894111] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1201.894111] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526db011-1354-41bc-edd7-647b68fea4a0" [ 1201.894111] env[61573]: _type = "Task" [ 1201.894111] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.912303] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526db011-1354-41bc-edd7-647b68fea4a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.950881] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1201.951625] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1201.951686] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleting the datastore file [datastore2] 338e2879-7dbe-4334-80da-4bbc1a071aa8 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.954314] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23c48337-78ba-48d1-801a-2b59f7ba714c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.964050] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for the task: (returnval){ [ 1201.964050] env[61573]: value = "task-4836826" [ 1201.964050] env[61573]: _type = "Task" [ 1201.964050] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.973627] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': task-4836826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.164113] env[61573]: DEBUG nova.network.neutron [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.190147] env[61573]: INFO nova.compute.manager [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Took 0.88 seconds to deallocate network for instance. [ 1202.259298] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da22eeb5-92ba-4ced-a3ca-300a35753e55 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.267779] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6602be1d-b118-4e29-9cb9-db57f3003b0d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.313253] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6620698f-2e12-4821-8ffb-a6587dcf2137 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.323766] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03d0e90-999b-42d8-9af2-aec879bc7554 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.343166] env[61573]: DEBUG nova.compute.provider_tree [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.348114] env[61573]: INFO nova.scheduler.client.report [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Deleted allocations for instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 [ 1202.354639] env[61573]: DEBUG nova.scheduler.client.report [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1202.374322] env[61573]: DEBUG oslo_concurrency.lockutils [None req-890cec9b-7113-4cac-87c6-a0a8f219f38f tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.030s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.375899] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 422.266s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.376131] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Acquiring lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.376383] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.376633] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.378600] env[61573]: INFO nova.compute.manager [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Terminating instance [ 1202.380720] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.674s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.381449] env[61573]: ERROR nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for 
image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1202.381449] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.381817] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.driver.spawn(context, instance, image_meta, [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._fetch_image_if_missing(context, vi) [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image_fetch(context, vi, tmp_image_ds_loc) [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] images.fetch_image( [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] metadata = IMAGE_API.get(context, image_ref) [ 1202.382208] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return session.show(context, image_id, [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] _reraise_translated_image_exception(image_id) [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise new_exc.with_traceback(exc_trace) [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 
8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1202.382584] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1202.382995] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.382995] env[61573]: DEBUG nova.compute.utils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
{{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1202.383534] env[61573]: DEBUG nova.compute.manager [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1202.383729] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1202.384274] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f88969f-b1f4-40b8-995c-0c070f8b6bab {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.386615] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Build of instance 8c57d493-12c4-47fe-a355-c9ade98b7158 was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1202.389549] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1202.389549] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1202.389549] env[61573]: DEBUG nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1202.389549] env[61573]: DEBUG nova.network.neutron [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1202.391823] env[61573]: DEBUG nova.compute.manager [None req-5400e020-3bbc-4ec9-b2df-b383570d678f tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 404b990e-a6c8-4166-be41-e49d44269fc2] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.405309] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb3383-cef2-412a-be76-cba13ee1eca2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.423377] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1202.423377] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating directory with path [datastore2] vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1202.423377] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4aef2a89-e530-49b7-85d4-a4e7fd040dc5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.433172] env[61573]: DEBUG nova.compute.manager [None req-5400e020-3bbc-4ec9-b2df-b383570d678f tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 404b990e-a6c8-4166-be41-e49d44269fc2] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1202.444108] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 277ddab5-2fef-4c64-ab26-22f1be2ca4f8 could not be found. [ 1202.444318] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1202.445224] env[61573]: INFO nova.compute.manager [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1202.445224] env[61573]: DEBUG oslo.service.loopingcall [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.448048] env[61573]: DEBUG nova.compute.manager [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1202.448048] env[61573]: DEBUG nova.network.neutron [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1202.448048] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created directory with path [datastore2] vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1202.448048] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Fetch image to [datastore2] vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1202.448325] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1202.448548] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686f0dea-c692-418b-b40b-bfc2b68176eb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.456798] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655be950-9694-48ad-a479-ce72dc5bbb10 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.474625] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdab3531-a691-432c-804a-55485229040c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.479775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5400e020-3bbc-4ec9-b2df-b383570d678f tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "404b990e-a6c8-4166-be41-e49d44269fc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.543s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.490252] env[61573]: DEBUG oslo_vmware.api [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Task: {'id': task-4836826, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.082988} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.519069] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.519410] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1202.519478] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1202.519652] env[61573]: INFO nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1202.523601] env[61573]: DEBUG nova.compute.manager [None req-5a5bf772-239b-447d-945c-76a526e54172 tempest-InstanceActionsTestJSON-975270587 tempest-InstanceActionsTestJSON-975270587-project-member] [instance: 1eb74431-a48c-4427-97dc-a9ce4666605a] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.525432] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6242b2db-4566-47cf-b35a-afea3711c06e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.528918] env[61573]: DEBUG nova.compute.claims [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1202.528918] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.528918] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.536460] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bcac8771-f472-492b-89e3-8c3e0e9045a4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.559426] env[61573]: DEBUG neutronclient.v2_0.client [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1202.563273] env[61573]: ERROR nova.compute.manager [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1202.563273] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.563802] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.driver.spawn(context, instance, image_meta, [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._fetch_image_if_missing(context, vi) [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image_fetch(context, vi, tmp_image_ds_loc) [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] images.fetch_image( [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] metadata = IMAGE_API.get(context, image_ref) [ 1202.564203] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return session.show(context, image_id, [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] _reraise_translated_image_exception(image_id) [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise new_exc.with_traceback(exc_trace) [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 
8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = getattr(controller, method)(*args, **kwargs) [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._get(image_id) [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1202.564601] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] resp, body = self.http_client.get(url, headers=header) [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.request(url, 'GET', **kwargs) [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self._handle_response(resp) [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exc.from_response(resp, resp.content) [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.564960] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._build_and_run_instance(context, instance, image, [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exception.RescheduledException( [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.RescheduledException: Build of instance 8c57d493-12c4-47fe-a355-c9ade98b7158 was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1202.565422] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] exception_handler_v20(status_code, error_body) [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise client_exc(message=error_message, [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Neutron server returns request_ids: ['req-8a4232b3-373b-40b7-aa6d-38ac0f371596'] [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 
8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._deallocate_network(context, instance, requested_networks) [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.network_api.deallocate_for_instance( [ 1202.565900] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] data = neutron.list_ports(**search_opts) [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.list('ports', self.ports_path, retrieve_all, [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] for r in self._pagination(collection, path, **params): [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] res = self.get(path, params=params) [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.566300] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 
8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.retry_request("GET", action, body=body, [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.do_request(method, action, body=body, [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._handle_fault_response(status_code, replybody, resp) [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exception.Unauthorized() [ 1202.566671] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.Unauthorized: Not authorized. [ 1202.568088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1202.568088] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1202.571365] env[61573]: DEBUG nova.compute.manager [None req-5a5bf772-239b-447d-945c-76a526e54172 tempest-InstanceActionsTestJSON-975270587 tempest-InstanceActionsTestJSON-975270587-project-member] [instance: 1eb74431-a48c-4427-97dc-a9ce4666605a] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1202.571365] env[61573]: DEBUG nova.network.neutron [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.604153] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5a5bf772-239b-447d-945c-76a526e54172 tempest-InstanceActionsTestJSON-975270587 tempest-InstanceActionsTestJSON-975270587-project-member] Lock "1eb74431-a48c-4427-97dc-a9ce4666605a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.839s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.615033] env[61573]: INFO nova.compute.manager [-] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] Took 0.17 seconds to deallocate network for instance. [ 1202.634091] env[61573]: DEBUG nova.compute.manager [None req-a22f1102-0917-4d4b-8ee3-f8e4e4b9bb46 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: cd4fae54-ce8d-491c-9244-a32fd88e0183] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.670966] env[61573]: DEBUG nova.compute.manager [None req-a22f1102-0917-4d4b-8ee3-f8e4e4b9bb46 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: cd4fae54-ce8d-491c-9244-a32fd88e0183] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1202.679261] env[61573]: INFO nova.scheduler.client.report [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Deleted allocations for instance 8c57d493-12c4-47fe-a355-c9ade98b7158 [ 1202.687040] env[61573]: DEBUG oslo_vmware.rw_handles [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1202.754658] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a22f1102-0917-4d4b-8ee3-f8e4e4b9bb46 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "cd4fae54-ce8d-491c-9244-a32fd88e0183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.486s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.756468] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ca6c5d14-da66-44ba-9741-048a2b6b583d tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.486s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.764961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.266s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.764961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Acquiring lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.764961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.764961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.766224] env[61573]: DEBUG oslo_vmware.rw_handles [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Completed reading data from the image iterator. 
{{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1202.766412] env[61573]: DEBUG oslo_vmware.rw_handles [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1202.767264] env[61573]: INFO nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Terminating instance [ 1202.769432] env[61573]: DEBUG nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1202.769622] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1202.770658] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dbd532d-db12-4832-af73-46af5b0d5f6a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.784034] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83370dd8-bd07-4ae8-8d5d-0d4a8c064fb6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.796133] env[61573]: DEBUG nova.compute.manager [None req-464dc338-f7e4-4e66-97df-c4c28427c64e tempest-ServerRescueTestJSON-231905722 tempest-ServerRescueTestJSON-231905722-project-member] [instance: c93aa98a-2c79-4ae3-ba66-7b2e4b67291b] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.798809] env[61573]: DEBUG nova.compute.manager [None req-6634c392-cb95-4884-a6a9-41d9d577904d tempest-ServersTestBootFromVolume-584192889 tempest-ServersTestBootFromVolume-584192889-project-member] [instance: 4cd581ab-601a-4854-8b3a-5f368b40f2c4] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.823723] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c57d493-12c4-47fe-a355-c9ade98b7158 could not be found. 
[ 1202.823957] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1202.824164] env[61573]: INFO nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1202.824420] env[61573]: DEBUG oslo.service.loopingcall [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.828130] env[61573]: DEBUG nova.compute.manager [-] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1202.828130] env[61573]: DEBUG nova.network.neutron [-] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1202.830676] env[61573]: DEBUG nova.compute.manager [None req-6634c392-cb95-4884-a6a9-41d9d577904d tempest-ServersTestBootFromVolume-584192889 tempest-ServersTestBootFromVolume-584192889-project-member] [instance: 4cd581ab-601a-4854-8b3a-5f368b40f2c4] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1202.888825] env[61573]: DEBUG nova.compute.manager [None req-464dc338-f7e4-4e66-97df-c4c28427c64e tempest-ServerRescueTestJSON-231905722 tempest-ServerRescueTestJSON-231905722-project-member] [instance: c93aa98a-2c79-4ae3-ba66-7b2e4b67291b] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1202.910713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6634c392-cb95-4884-a6a9-41d9d577904d tempest-ServersTestBootFromVolume-584192889 tempest-ServersTestBootFromVolume-584192889-project-member] Lock "4cd581ab-601a-4854-8b3a-5f368b40f2c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.593s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.922760] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2ec65d-91b5-4ddb-a1ac-b81b68d2121c tempest-ListServerFiltersTestJSON-1417914962 tempest-ListServerFiltersTestJSON-1417914962-project-member] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.547s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.923988] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 69.204s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.924200] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 277ddab5-2fef-4c64-ab26-22f1be2ca4f8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1202.924380] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "277ddab5-2fef-4c64-ab26-22f1be2ca4f8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.928979] env[61573]: DEBUG oslo_concurrency.lockutils [None req-464dc338-f7e4-4e66-97df-c4c28427c64e tempest-ServerRescueTestJSON-231905722 tempest-ServerRescueTestJSON-231905722-project-member] Lock "c93aa98a-2c79-4ae3-ba66-7b2e4b67291b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.148s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.933024] env[61573]: DEBUG nova.compute.manager [None req-8753a2c9-d3a3-4648-ade4-c5d8d7097b39 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] [instance: 7b28cf4f-5ba2-4fde-8c53-7a403166ae2a] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.942418] env[61573]: DEBUG nova.compute.manager [None req-0f733e81-23b8-4102-b11a-a41f841bc8e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: 546a2a2f-fca3-410f-88c0-f71a820fd2bd] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1202.981267] env[61573]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1202.981896] env[61573]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-745014d3-d8e2-4fa0-a182-23e0b1e274cd'] [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1202.982959] env[61573]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1202.983664] env[61573]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1202.984243] env[61573]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1202.984243] env[61573]: ERROR oslo.service.loopingcall [ 1202.984755] env[61573]: ERROR nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1202.988147] env[61573]: DEBUG nova.compute.manager [None req-0f733e81-23b8-4102-b11a-a41f841bc8e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: 546a2a2f-fca3-410f-88c0-f71a820fd2bd] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1203.025768] env[61573]: DEBUG nova.compute.manager [None req-8753a2c9-d3a3-4648-ade4-c5d8d7097b39 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] [instance: 7b28cf4f-5ba2-4fde-8c53-7a403166ae2a] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1203.033961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0f733e81-23b8-4102-b11a-a41f841bc8e2 tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "546a2a2f-fca3-410f-88c0-f71a820fd2bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.538s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.054731] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1203.081228] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8753a2c9-d3a3-4648-ade4-c5d8d7097b39 tempest-AttachVolumeTestJSON-101081699 tempest-AttachVolumeTestJSON-101081699-project-member] Lock "7b28cf4f-5ba2-4fde-8c53-7a403166ae2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.167s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.082122] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5c2617a4-817c-4058-a7a0-fc8f08eb8e1a tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Acquiring lock "e045b957-1670-4277-a9e3-d4ce1fb24047" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.082122] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5c2617a4-817c-4058-a7a0-fc8f08eb8e1a tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "e045b957-1670-4277-a9e3-d4ce1fb24047" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.083570] env[61573]: ERROR nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] exception_handler_v20(status_code, error_body) [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise client_exc(message=error_message, [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Neutron server returns request_ids: ['req-745014d3-d8e2-4fa0-a182-23e0b1e274cd'] [ 1203.083570] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During handling of the above exception, another exception occurred: [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Traceback (most recent call last): [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._delete_instance(context, instance, bdms) [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._shutdown_instance(context, instance, bdms) [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._try_deallocate_network(context, instance, requested_networks) [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] with excutils.save_and_reraise_exception(): [ 1203.083980] env[61573]: ERROR 
nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.083980] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.force_reraise() [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise self.value [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] _deallocate_network_with_retries() [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return evt.wait() [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = hub.switch() [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.greenlet.switch() [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1203.084599] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = func(*self.args, **self.kw) [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] result = f(*args, **kwargs) [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._deallocate_network( [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self.network_api.deallocate_for_instance( [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 
8c57d493-12c4-47fe-a355-c9ade98b7158] data = neutron.list_ports(**search_opts) [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.list('ports', self.ports_path, retrieve_all, [ 1203.085193] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] for r in self._pagination(collection, path, **params): [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] res = self.get(path, params=params) [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.retry_request("GET", action, body=body, [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1203.086568] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] return self.do_request(method, action, body=body, [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] ret = obj(*args, **kwargs) [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] self._handle_fault_response(status_code, replybody, resp) [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1203.087088] env[61573]: ERROR nova.compute.manager [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] [ 1203.106957] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1203.131585] env[61573]: DEBUG oslo_concurrency.lockutils [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.369s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.133537] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 69.413s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.133717] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1203.134265] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8c57d493-12c4-47fe-a355-c9ade98b7158" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.135386] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.137499] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc795b9-42b7-4ce5-a42a-9de3ba877824 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.146725] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a05747c-6efb-4c09-95d7-dab18d6a5eb3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.187046] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151e1fc2-9f04-4113-b803-f29a55164f0b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.198793] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24018be7-1191-48ca-b790-1b3710316a32 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.203572] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.214225] env[61573]: DEBUG nova.compute.provider_tree [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.216662] env[61573]: INFO nova.compute.manager [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] [instance: 8c57d493-12c4-47fe-a355-c9ade98b7158] Successfully reverted task state from None on failure for instance. [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server [None req-87a43ea8-c56e-4757-8ec4-a92f11d214b8 tempest-MigrationsAdminTest-352573817 tempest-MigrationsAdminTest-352573817-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-745014d3-d8e2-4fa0-a182-23e0b1e274cd'] [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1203.222066] env[61573]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.222714] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1203.223382] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1203.224069] env[61573]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1203.224667] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.224667] env[61573]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.225362] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1203.225937] env[61573]: ERROR oslo_messaging.rpc.server [ 1203.225937] env[61573]: DEBUG nova.scheduler.client.report [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1203.238870] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.710s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.239622] env[61573]: ERROR nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1203.239622] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1203.240065] env[61573]: ERROR nova.compute.manager 
[instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.240065] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.driver.spawn(context, instance, image_meta, [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._fetch_image_if_missing(context, vi) [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image_fetch(context, vi, tmp_image_ds_loc) [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] 
images.fetch_image( [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] metadata = IMAGE_API.get(context, image_ref) [ 1203.240466] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return session.show(context, image_id, [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] _reraise_translated_image_exception(image_id) [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise new_exc.with_traceback(exc_trace) [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1203.240894] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1203.241315] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.241655] env[61573]: DEBUG nova.compute.utils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1203.241655] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.106s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.242916] env[61573]: INFO nova.compute.claims [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.245533] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Build of instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1203.246019] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1203.246218] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1203.246392] env[61573]: DEBUG nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1203.246560] env[61573]: DEBUG nova.network.neutron [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1203.498970] env[61573]: DEBUG neutronclient.v2_0.client [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1203.500349] env[61573]: ERROR nova.compute.manager [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1203.500349] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 
338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.500854] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.driver.spawn(context, instance, image_meta, [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._fetch_image_if_missing(context, vi) [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image_fetch(context, vi, tmp_image_ds_loc) [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] 
images.fetch_image( [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] metadata = IMAGE_API.get(context, image_ref) [ 1203.501258] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return session.show(context, image_id, [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] _reraise_translated_image_exception(image_id) [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise new_exc.with_traceback(exc_trace) [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = getattr(controller, method)(*args, **kwargs) [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._get(image_id) [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1203.501687] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] resp, body = self.http_client.get(url, headers=header) [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.request(url, 'GET', **kwargs) [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self._handle_response(resp) [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exc.from_response(resp, resp.content) [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502133] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._build_and_run_instance(context, instance, image, [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exception.RescheduledException( [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.RescheduledException: Build of instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1203.502547] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] exception_handler_v20(status_code, error_body) [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise client_exc(message=error_message, [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Neutron server returns request_ids: ['req-8dd9d866-168c-429a-8b6c-a08cbf2f15cb'] [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._deallocate_network(context, instance, requested_networks) [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.network_api.deallocate_for_instance( [ 1203.502995] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] data = neutron.list_ports(**search_opts) [ 
1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.list('ports', self.ports_path, retrieve_all, [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] for r in self._pagination(collection, path, **params): [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] res = self.get(path, params=params) [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.503596] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.retry_request("GET", action, body=body, [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.do_request(method, action, body=body, [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._handle_fault_response(status_code, replybody, resp) [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exception.Unauthorized() [ 1203.504076] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.Unauthorized: Not authorized. [ 1203.504522] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.540115] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44f10fc-db52-4b0b-8ba3-2307a5c37a64 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.550964] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2beb34-5466-4ef9-b349-c78a3ecd3603 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.583508] env[61573]: INFO nova.scheduler.client.report [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Deleted allocations for instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 [ 1203.590304] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8eacb48-7a26-4193-8056-84837d45eb3e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.602488] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176c15f5-7f80-4fcf-93d1-b87b544c763b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.608099] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bb973317-d71d-47b4-8b7c-428f5bd852d3 tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 592.734s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.616935] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 394.787s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.617216] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Acquiring lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.617390] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.617564] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.619699] env[61573]: DEBUG nova.compute.provider_tree [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.620988] env[61573]: INFO nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Terminating instance [ 1203.625317] env[61573]: DEBUG nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1203.625317] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1203.625317] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2809dae-242a-45e8-a459-a5d18845c7b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.627715] env[61573]: DEBUG nova.scheduler.client.report [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1203.631726] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1203.638497] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c20b56d-d7df-4468-9dc2-34b2984f5406 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.651305] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.410s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.651665] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1203.654453] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.451s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.657248] env[61573]: INFO nova.compute.claims [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.675442] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 338e2879-7dbe-4334-80da-4bbc1a071aa8 could not be found. [ 1203.675442] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1203.675442] env[61573]: INFO nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1203.675442] env[61573]: DEBUG oslo.service.loopingcall [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.675442] env[61573]: DEBUG nova.compute.manager [-] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1203.675752] env[61573]: DEBUG nova.network.neutron [-] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1203.753357] env[61573]: DEBUG nova.compute.utils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1203.761320] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Allocating IP information in the background. 
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1203.761496] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1203.778376] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.782463] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1203.837345] env[61573]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1203.837345] env[61573]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
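
[editor's note] The 401 above is raised while Nova deallocates the instance's ports using its own service credentials, and the log itself points at the fix: "please verify Neutron admin credential located in nova.conf", i.e. the [neutron] auth options (auth_url, username, password, project_name, user/project domain). Below is a minimal, hedged sketch of that check using keystoneauth1 (the auth library Nova loads these options with); all values are placeholders, not this deployment's real settings. The traceback that follows shows the failing call path through neutronclient.

    # Sketch: can the [neutron] service credentials from nova.conf still get a token?
    # Placeholder values only -- substitute the real ones from nova.conf.
    from keystoneauth1.identity import v3
    from keystoneauth1 import session

    auth = v3.Password(
        auth_url="http://keystone.example.org/identity/v3",  # placeholder auth_url
        username="nova",                                      # placeholder username
        password="secret",                                    # placeholder password
        project_name="service",                               # placeholder project_name
        user_domain_name="Default",
        project_domain_name="Default",
    )
    sess = session.Session(auth=auth)
    # Raises keystoneauth1's Unauthorized if the credentials are bad, mirroring the
    # 401 that neutronclient reports in this log.
    print(sess.get_token())
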
[ 1203.837437] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-81980b68-5a84-49fd-b7cf-af7abfe2cf5c'] [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1203.837437] env[61573]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.838382] env[61573]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1203.838382] env[61573]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1203.839091] env[61573]: ERROR oslo.service.loopingcall [ 1203.839566] env[61573]: ERROR nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1203.865803] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1203.884036] env[61573]: ERROR nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] exception_handler_v20(status_code, error_body) [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise client_exc(message=error_message, [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Neutron server returns request_ids: ['req-81980b68-5a84-49fd-b7cf-af7abfe2cf5c'] [ 1203.884036] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During handling of the above exception, another exception occurred: [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Traceback (most recent call last): [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._delete_instance(context, instance, bdms) [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._shutdown_instance(context, instance, bdms) [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1203.884496] env[61573]: ERROR 
nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._try_deallocate_network(context, instance, requested_networks) [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] with excutils.save_and_reraise_exception(): [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1203.884496] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.force_reraise() [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise self.value [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] _deallocate_network_with_retries() [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return evt.wait() [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = hub.switch() [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.greenlet.switch() [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1203.884891] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = func(*self.args, **self.kw) [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] result = f(*args, **kwargs) [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._deallocate_network( [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 
338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self.network_api.deallocate_for_instance( [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] data = neutron.list_ports(**search_opts) [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.list('ports', self.ports_path, retrieve_all, [ 1203.885363] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] for r in self._pagination(collection, path, **params): [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] res = self.get(path, params=params) [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.retry_request("GET", action, body=body, [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1203.885743] env[61573]: ERROR nova.compute.manager [instance: 
338e2879-7dbe-4334-80da-4bbc1a071aa8] return self.do_request(method, action, body=body, [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] ret = obj(*args, **kwargs) [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] self._handle_fault_response(status_code, replybody, resp) [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1203.886149] env[61573]: ERROR nova.compute.manager [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] [ 1203.907442] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1203.907723] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1203.907893] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.908091] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1203.908257] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 
tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.908409] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1203.908638] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1203.908799] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1203.909200] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1203.909393] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1203.909573] env[61573]: DEBUG nova.virt.hardware [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1203.910819] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f5a53d-014c-4ef1-9ba6-7a8a88db6665 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.923860] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7d56a9-53e7-4d1d-8f62-e64c864cd2b6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.929770] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.313s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.934323] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" 
acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 70.214s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.934553] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1203.934743] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "338e2879-7dbe-4334-80da-4bbc1a071aa8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.008865] env[61573]: INFO nova.compute.manager [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] [instance: 338e2879-7dbe-4334-80da-4bbc1a071aa8] Successfully reverted task state from None on failure for instance. [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server [None req-77d07433-1645-4035-9879-8820a35ff41b tempest-DeleteServersAdminTestJSON-303498553 tempest-DeleteServersAdminTestJSON-303498553-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-81980b68-5a84-49fd-b7cf-af7abfe2cf5c'] [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1204.015532] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1204.016361] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1204.017153] env[61573]: ERROR 
oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1204.017153] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 1204.017929] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1204.018687] env[61573]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1204.018687] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1204.019527] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1204.020239] env[61573]: ERROR oslo_messaging.rpc.server [ 1204.042452] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297bd2fa-bd7d-4e75-87e8-6fb2f14e011f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.051885] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab40671b-8d54-4b47-a71a-ebf6845697be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.083150] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15806ea6-58af-4f3b-8552-834344a222b8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.091486] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d4a7fe-abc8-4168-aca3-78b31f9bba0c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.107129] env[61573]: DEBUG nova.compute.provider_tree [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.109827] env[61573]: DEBUG nova.policy [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e209cfe158004e46a9693c62a5c2e3f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90325af48fc44451a6c15e089107271a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1204.115245] env[61573]: DEBUG nova.scheduler.client.report [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 
tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1204.131920] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.477s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.132512] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1204.135675] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.359s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.137186] env[61573]: INFO nova.compute.claims [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.177481] env[61573]: DEBUG nova.compute.utils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1204.179555] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1204.179691] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1204.190263] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1204.250553] env[61573]: DEBUG nova.policy [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08c59199cd604147a2f0a2cd0dc95773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e08d442d2b554ce6bd9e2cc031cf6735', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1204.266596] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1204.297675] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1204.297939] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1204.298120] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.298310] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1204.298459] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.298781] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 
tempest-ServersTestJSON-411708961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1204.298846] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1204.298960] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1204.299372] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1204.299580] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1204.299761] env[61573]: DEBUG nova.virt.hardware [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1204.300643] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b98474-8df6-4412-bc41-30d54f244eba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.311749] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c232fb6-49d9-4a7e-919b-4303c84c134f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.433899] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd77237-ffd1-4537-858a-119c8d870a2d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.441235] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11d888c-5004-495d-a1fb-09fe8687ccba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.473078] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b67726-59e3-4768-a6bf-a204a629ae1a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.485783] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcedbb5-e719-4cec-a25d-fd4980023464 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1204.505607] env[61573]: DEBUG nova.compute.provider_tree [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.513942] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Successfully created port: 7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1204.516632] env[61573]: DEBUG nova.scheduler.client.report [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1204.537020] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.399s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.537020] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1204.577978] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Successfully created port: 70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1204.581779] env[61573]: DEBUG nova.compute.utils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1204.585774] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Not allocating networking since 'none' was specified. 
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1204.597694] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1204.687275] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1204.726065] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1204.726475] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1204.726557] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.726708] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1204.726852] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.727487] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1204.727616] env[61573]: DEBUG nova.virt.hardware [None 
req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1204.727874] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1204.728186] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1204.728266] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1204.728435] env[61573]: DEBUG nova.virt.hardware [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1204.729346] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db7df95-f64c-4091-9eb5-c57d80f2aad9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.739980] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6f5a18-24af-4dc7-be44-31dd613391ae {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.753946] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance VIF info [] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.760363] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Creating folder: Project (8df0e09d5e0344a78c770ce0fb3392ad). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1204.761040] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da3ef33f-ec1b-4123-b978-7772cc000a16 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.773212] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Created folder: Project (8df0e09d5e0344a78c770ce0fb3392ad) in parent group-v942801. 
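The nova.virt.hardware records above (Flavor/Image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show the driver enumerating candidate CPU topologies for the m1.nano flavor before spawning. The sketch below is a minimal, hedged illustration of that enumeration idea only; it is not Nova's _get_possible_cpu_topologies code, and the function and variable names are invented for this example. It lists every sockets*cores*threads factorisation of the requested vCPU count that stays under the per-dimension maxima (65536 in this log, i.e. effectively unbounded), which for vcpus=1 yields exactly one topology, 1:1:1, matching the log.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) tuples whose product equals vcpus."""
    # No dimension can exceed the vCPU count, so bound each range by vcpus
    # rather than iterating up to the (huge) configured maxima.
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

if __name__ == '__main__':
    # m1.nano requests vcpus=1 with no explicit limits, so the only
    # possible topology is 1:1:1, as the log reports.
    print(list(possible_topologies(1)))   # -> [(1, 1, 1)]

With larger flavors the same enumeration produces several candidates (for example vcpus=4 gives 1x4x1, 2x2x1, 4x1x1, and so on), which is why the log then speaks of sorting "desired topologies" by preference.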
[ 1204.773449] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Creating folder: Instances. Parent ref: group-v942870. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1204.773709] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d2d0fbf-b6fa-4ff2-a003-9717462fd774 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.784757] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Created folder: Instances in parent group-v942870. [ 1204.784757] env[61573]: DEBUG oslo.service.loopingcall [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.784757] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1204.784757] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8faef560-17f3-4c37-bae0-925f9cdd4aee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.805029] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.805029] env[61573]: value = "task-4836829" [ 1204.805029] env[61573]: _type = "Task" [ 1204.805029] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.813955] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836829, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.314020] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836829, 'name': CreateVM_Task, 'duration_secs': 0.310152} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.314473] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1205.314961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.315258] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.315745] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1205.316108] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77871b9f-97e5-4a00-a830-951969247b89 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.322469] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for the task: (returnval){ [ 1205.322469] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cad843-f4b4-db32-ae3b-d8c6ff2795af" [ 1205.322469] env[61573]: _type = "Task" [ 1205.322469] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.332529] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cad843-f4b4-db32-ae3b-d8c6ff2795af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.399393] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Successfully updated port: 7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1205.415795] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.415795] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.415795] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1205.432901] env[61573]: DEBUG nova.compute.manager [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Received event network-vif-plugged-7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1205.433139] env[61573]: DEBUG oslo_concurrency.lockutils [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] Acquiring lock "59913660-3644-41f2-a422-f814bd69b4a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.435013] env[61573]: DEBUG oslo_concurrency.lockutils [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] Lock "59913660-3644-41f2-a422-f814bd69b4a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.439025] env[61573]: DEBUG oslo_concurrency.lockutils [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] Lock "59913660-3644-41f2-a422-f814bd69b4a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.439025] env[61573]: DEBUG nova.compute.manager [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] No waiting events found dispatching network-vif-plugged-7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1205.439025] env[61573]: WARNING nova.compute.manager [req-e3a4e536-74ee-418c-aacc-c7f9d3d17c67 req-c1bc4388-ddc5-4b35-8f77-3234df4db5f4 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Received unexpected event network-vif-plugged-7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 for instance with vm_state building and task_state spawning. [ 1205.486280] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Successfully updated port: 70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1205.494037] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1205.495458] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.495592] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.495739] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1205.515429] env[61573]: DEBUG nova.compute.manager [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Received event network-vif-plugged-70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1205.519021] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] Acquiring lock "2d5777f8-a431-43bd-8934-7cc33fd14718-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.519021] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.519021] env[61573]: DEBUG oslo_concurrency.lockutils [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] Lock 
"2d5777f8-a431-43bd-8934-7cc33fd14718-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.519021] env[61573]: DEBUG nova.compute.manager [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] No waiting events found dispatching network-vif-plugged-70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1205.519395] env[61573]: WARNING nova.compute.manager [req-a7c861c7-3893-4050-a343-7c24c0d38879 req-ee1674ab-3861-4761-9367-69f0c1a30d93 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Received unexpected event network-vif-plugged-70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f for instance with vm_state building and task_state spawning. [ 1205.573298] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1205.750284] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Updating instance_info_cache with network_info: [{"id": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "address": "fa:16:3e:e3:10:d5", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7a3ba9-df", "ovs_interfaceid": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.768166] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.768611] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance 
network_info: |[{"id": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "address": "fa:16:3e:e3:10:d5", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7a3ba9-df", "ovs_interfaceid": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1205.769474] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:10:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c7a3ba9-df5f-48d3-a676-4e4f2e615d79', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1205.778257] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating folder: Project (90325af48fc44451a6c15e089107271a). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1205.778700] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bc800b2-5a90-482a-a945-4cf6ae0f393f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.790572] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created folder: Project (90325af48fc44451a6c15e089107271a) in parent group-v942801. [ 1205.790810] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating folder: Instances. Parent ref: group-v942873. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1205.791104] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f65afd91-1d82-4211-b55a-af7e0e16fe44 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.802287] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created folder: Instances in parent group-v942873. [ 1205.802620] env[61573]: DEBUG oslo.service.loopingcall [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.802830] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1205.803059] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1178f37-7233-4fd7-98c2-b91012bebb54 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.823975] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1205.823975] env[61573]: value = "task-4836832" [ 1205.823975] env[61573]: _type = "Task" [ 1205.823975] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.837159] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836832, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.841123] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.841418] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1205.841689] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.085696] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Updating instance_info_cache with network_info: [{"id": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", "address": "fa:16:3e:4e:77:59", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70fe87cb-fa", "ovs_interfaceid": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.100919] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.101253] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance network_info: |[{"id": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", 
"address": "fa:16:3e:4e:77:59", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70fe87cb-fa", "ovs_interfaceid": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1206.101726] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:77:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1206.109278] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating folder: Project (e08d442d2b554ce6bd9e2cc031cf6735). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1206.109903] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d784bc7-f613-465d-b765-d19222d8adbd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.124665] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created folder: Project (e08d442d2b554ce6bd9e2cc031cf6735) in parent group-v942801. [ 1206.124879] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating folder: Instances. Parent ref: group-v942876. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1206.125154] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53f71abf-c6dd-4981-9c48-65e071551759 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.135871] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created folder: Instances in parent group-v942876. [ 1206.136167] env[61573]: DEBUG oslo.service.loopingcall [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1206.136462] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1206.136765] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d91a7c2e-bd95-49ab-bd74-ff2eae84d687 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.158472] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1206.158472] env[61573]: value = "task-4836835" [ 1206.158472] env[61573]: _type = "Task" [ 1206.158472] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.166509] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836835, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.340191] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836832, 'name': CreateVM_Task, 'duration_secs': 0.323381} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.340494] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1206.341366] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.341624] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.341960] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1206.342250] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91e7f94-55ca-4a12-80c1-508ca79a455f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.348471] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 1206.348471] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]528b0f81-9672-388d-b80c-d2f374d5c5e1" [ 1206.348471] env[61573]: _type = "Task" [ 1206.348471] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.357420] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]528b0f81-9672-388d-b80c-d2f374d5c5e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.669119] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836835, 'name': CreateVM_Task, 'duration_secs': 0.306844} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.669287] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1206.669916] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.859103] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.859103] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.859405] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.859405] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.859661] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1206.859899] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b42d83f-ba24-4046-abaf-0de36e6143e9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.866605] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 1206.866605] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52712f6e-b328-2aa3-df8d-f495430c9104" [ 1206.866605] env[61573]: _type = "Task" [ 1206.866605] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.875225] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52712f6e-b328-2aa3-df8d-f495430c9104, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.378713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.379094] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1207.379365] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.462320] env[61573]: DEBUG nova.compute.manager [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Received event network-changed-7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1207.462783] env[61573]: DEBUG nova.compute.manager [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Refreshing instance network info cache due to event network-changed-7c7a3ba9-df5f-48d3-a676-4e4f2e615d79. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1207.462783] env[61573]: DEBUG oslo_concurrency.lockutils [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] Acquiring lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.463075] env[61573]: DEBUG oslo_concurrency.lockutils [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] Acquired lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.463075] env[61573]: DEBUG nova.network.neutron [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Refreshing network info cache for port 7c7a3ba9-df5f-48d3-a676-4e4f2e615d79 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1207.545915] env[61573]: DEBUG nova.compute.manager [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Received event network-changed-70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1207.546139] env[61573]: DEBUG nova.compute.manager [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Refreshing instance network info cache due to event network-changed-70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1207.546440] env[61573]: DEBUG oslo_concurrency.lockutils [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] Acquiring lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.546609] env[61573]: DEBUG oslo_concurrency.lockutils [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] Acquired lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.546774] env[61573]: DEBUG nova.network.neutron [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Refreshing network info cache for port 70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1207.851613] env[61573]: DEBUG nova.network.neutron [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Updated VIF entry in instance network info cache for port 7c7a3ba9-df5f-48d3-a676-4e4f2e615d79. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1207.851981] env[61573]: DEBUG nova.network.neutron [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Updating instance_info_cache with network_info: [{"id": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "address": "fa:16:3e:e3:10:d5", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7a3ba9-df", "ovs_interfaceid": "7c7a3ba9-df5f-48d3-a676-4e4f2e615d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.861893] env[61573]: DEBUG oslo_concurrency.lockutils [req-976b828d-5356-497f-8f37-e6799fed48de req-97fa8f1c-2699-4b18-be88-06b972be2221 service nova] Releasing lock "refresh_cache-59913660-3644-41f2-a422-f814bd69b4a1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.906945] env[61573]: DEBUG nova.network.neutron [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Updated VIF entry in instance network info cache for port 70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1207.907293] env[61573]: DEBUG nova.network.neutron [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Updating instance_info_cache with network_info: [{"id": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", "address": "fa:16:3e:4e:77:59", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70fe87cb-fa", "ovs_interfaceid": "70fe87cb-fa77-4fc7-9c87-e3981dbe8e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.920805] env[61573]: DEBUG oslo_concurrency.lockutils [req-63cdd599-29d8-4e24-9fd3-347748bf2ccb req-8b649c44-6739-4a0a-a0cb-b38347e93153 service nova] Releasing lock "refresh_cache-2d5777f8-a431-43bd-8934-7cc33fd14718" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.186983] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "59913660-3644-41f2-a422-f814bd69b4a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.704518] env[61573]: DEBUG oslo_concurrency.lockutils [None req-712d33dc-ffb4-43c2-bca0-e8abbf75ab3f tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "89d2c816-dcac-41fd-b760-d93348dbbebd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.704976] env[61573]: DEBUG oslo_concurrency.lockutils [None req-712d33dc-ffb4-43c2-bca0-e8abbf75ab3f tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "89d2c816-dcac-41fd-b760-d93348dbbebd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.425072] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.404133] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.403498] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.403850] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1226.404587] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.242850] env[61573]: DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "2d5777f8-a431-43bd-8934-7cc33fd14718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.399047] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.403576] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.404031] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.416833] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.417125] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.417379] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.417790] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1227.419256] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd428d11-2ed4-409d-9267-fa6f9bed75ee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.429601] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7e018e-2ce2-4b55-818c-f466f8d891aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.444518] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dd9731-7d46-4079-bc95-4be8ef6615a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.452399] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d65f71-8dae-4a4c-86f5-afe632c9f560 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.483049] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1227.483202] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.483393] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.564739] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.564896] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565037] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565158] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565276] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565385] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565497] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565664] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565763] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.565878] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1227.579035] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.590386] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.601368] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.612997] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 92145176-a567-4098-a9d4-f74a9316e38c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.623012] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1316abcb-b4b8-4e7d-858e-55a4db29b429 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.633198] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.642499] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e045b957-1670-4277-a9e3-d4ce1fb24047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.653509] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 89d2c816-dcac-41fd-b760-d93348dbbebd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1227.653509] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1227.653823] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '57', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'io_workload': '10', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1227.881997] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a93a39-2d3a-40bf-af18-d62a72c74d25 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.890400] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d71f76-d803-42ca-a75d-30056f42fe92 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.921514] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7b517e-7359-4f85-a166-587edc27e68b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.930060] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a1faf8-4415-448b-8b83-a2f264b1d0c6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.943818] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.952854] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1227.969409] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1227.969629] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.486s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.800857] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "1605911c-cf22-4206-b911-92b2a137dc84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.969635] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.969635] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1228.969783] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1228.991835] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.991988] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992195] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992327] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992452] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992570] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992685] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992801] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.992914] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.993040] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1228.993161] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1228.993676] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.403272] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.336601] env[61573]: WARNING oslo_vmware.rw_handles [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.336601] env[61573]: ERROR oslo_vmware.rw_handles [ 1248.336601] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1248.338489] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1248.338734] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Copying Virtual Disk [datastore2] vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] 
vmware_temp/35e5b865-c1a5-42b0-8340-1d68f8ce27e7/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1248.339108] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a4a9930-f586-4d53-badb-f161509c0db9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.348242] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1248.348242] env[61573]: value = "task-4836836" [ 1248.348242] env[61573]: _type = "Task" [ 1248.348242] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.357322] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.858759] env[61573]: DEBUG oslo_vmware.exceptions [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1248.859071] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.859674] env[61573]: ERROR nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.859674] env[61573]: Faults: ['InvalidArgument'] [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Traceback (most recent call last): [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] yield resources [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self.driver.spawn(context, instance, image_meta, [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1248.859674] 
env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self._fetch_image_if_missing(context, vi) [ 1248.859674] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] image_cache(vi, tmp_image_ds_loc) [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] vm_util.copy_virtual_disk( [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] session._wait_for_task(vmdk_copy_task) [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return self.wait_for_task(task_ref) [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return evt.wait() [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] result = hub.switch() [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1248.860110] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return self.greenlet.switch() [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self.f(*self.args, **self.kw) [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] raise exceptions.translate_fault(task_info.error) [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 
0d91230d-849f-4e64-8685-5298ee5ea5b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Faults: ['InvalidArgument'] [ 1248.860532] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] [ 1248.860532] env[61573]: INFO nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Terminating instance [ 1248.861642] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.861860] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.862127] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6296294c-c1d3-4f6d-b481-84f9d2592095 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.865999] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1248.866264] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1248.867076] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367df147-a2f2-4eae-ac4d-c7f09a604fc0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.871311] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.871492] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1248.872530] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7004dbe3-4358-4e53-b06e-9003d9e0357c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.876876] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1248.878146] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-973dfb8e-e0fe-426e-95e7-0e3ee2b7dd0a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.881392] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for the task: (returnval){ [ 1248.881392] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cfafde-ed71-ae9f-f5d8-e12fc7f61ed6" [ 1248.881392] env[61573]: _type = "Task" [ 1248.881392] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.891454] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cfafde-ed71-ae9f-f5d8-e12fc7f61ed6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.957592] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1248.957592] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1248.957775] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleting the datastore file [datastore2] 0d91230d-849f-4e64-8685-5298ee5ea5b1 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.958079] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b583fa71-3457-4883-b9f0-76a53b8f1ab9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.966376] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1248.966376] env[61573]: value = "task-4836838" [ 1248.966376] env[61573]: _type = "Task" [ 1248.966376] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.974462] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.392016] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1249.392415] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Creating directory with path [datastore2] vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.392599] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5338a6e-56ca-4ac4-8dad-cb6df11ec808 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.404385] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Created directory with path [datastore2] vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.404612] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Fetch image to [datastore2] vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1249.404744] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1249.405521] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23916a7e-a884-4e45-8f4b-46ec7613a093 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.412456] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c51bc8e-0761-4f0e-900c-8b735c3aaa17 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.422828] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12743cab-12c3-4049-af37-61d496e889a5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.453123] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4fdf5ebc-a91b-4aac-9a7b-0b1cdb2bef0d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.459843] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-97021ec4-7245-4536-9780-070a6e25e4b7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.475362] env[61573]: DEBUG oslo_vmware.api [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082172} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.475792] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.475970] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1249.476923] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1249.476923] env[61573]: INFO nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1249.478999] env[61573]: DEBUG nova.compute.claims [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1249.479190] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.479409] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.491265] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1249.549401] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1249.610921] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1249.611129] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1249.841027] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40661898-9c4b-43c8-9b57-d55856837510 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.849390] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bb097e-03e0-4d32-bb2e-ae07d4e25108 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.881625] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07f4c1d-bbe0-4879-8716-dbdeaff838b4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.889690] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3508c7de-8729-42d7-b3ba-3de8d0a8a87f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.903322] env[61573]: DEBUG nova.compute.provider_tree [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.914248] env[61573]: DEBUG nova.scheduler.client.report [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1249.930714] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.451s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.931417] env[61573]: ERROR nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.931417] env[61573]: Faults: ['InvalidArgument'] [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Traceback (most recent call last): [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance 
[ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self.driver.spawn(context, instance, image_meta, [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self._fetch_image_if_missing(context, vi) [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] image_cache(vi, tmp_image_ds_loc) [ 1249.931417] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] vm_util.copy_virtual_disk( [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] session._wait_for_task(vmdk_copy_task) [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return self.wait_for_task(task_ref) [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return evt.wait() [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] result = hub.switch() [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] return self.greenlet.switch() [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1249.931883] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] self.f(*self.args, **self.kw) [ 1249.932353] env[61573]: ERROR nova.compute.manager [instance: 
0d91230d-849f-4e64-8685-5298ee5ea5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1249.932353] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] raise exceptions.translate_fault(task_info.error) [ 1249.932353] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.932353] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Faults: ['InvalidArgument'] [ 1249.932353] env[61573]: ERROR nova.compute.manager [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] [ 1249.932353] env[61573]: DEBUG nova.compute.utils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1249.933673] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Build of instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 was re-scheduled: A specified parameter was not correct: fileType [ 1249.933673] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1249.934078] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1249.934254] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1249.934422] env[61573]: DEBUG nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1249.934616] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.653737] env[61573]: DEBUG nova.network.neutron [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.669381] env[61573]: INFO nova.compute.manager [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Took 0.73 seconds to deallocate network for instance. [ 1250.801779] env[61573]: INFO nova.scheduler.client.report [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleted allocations for instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 [ 1250.835184] env[61573]: DEBUG oslo_concurrency.lockutils [None req-baab40d5-ce4c-4d52-8469-eef7e1df988e tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.602s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.836532] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.027s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.836768] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.837035] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.837196] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.844881] env[61573]: INFO nova.compute.manager [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Terminating instance [ 1250.847255] env[61573]: DEBUG nova.compute.manager [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1250.847488] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1250.847735] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73755ef5-dc1a-4ae3-8041-d0b1d50aef42 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.858199] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36643875-f473-4b0a-b862-d915c64bd976 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.874106] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1250.903787] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d91230d-849f-4e64-8685-5298ee5ea5b1 could not be found. 
[ 1250.903950] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1250.904145] env[61573]: INFO nova.compute.manager [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1250.904435] env[61573]: DEBUG oslo.service.loopingcall [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.904773] env[61573]: DEBUG nova.compute.manager [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1250.904824] env[61573]: DEBUG nova.network.neutron [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.940798] env[61573]: DEBUG nova.network.neutron [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.951607] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.951862] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.953531] env[61573]: INFO nova.compute.claims [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1250.957342] env[61573]: INFO nova.compute.manager [-] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] Took 0.05 seconds to deallocate network for instance. 
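The paired 'Acquiring lock "compute_resources"' / 'acquired ... waited' / '"released" ... held' DEBUG lines that bracket the resource-tracker work above are emitted by oslo_concurrency.lockutils around the decorated critical section. A minimal, self-contained sketch of that pattern follows; `instance_claim` here is a hypothetical stand-in, not the actual ResourceTracker method.

    # Illustrative sketch (not part of the log): a function guarded by an
    # in-process lock via oslo_concurrency.lockutils. Running it with DEBUG
    # logging enabled produces "Acquiring lock ...", "Lock ... acquired ...
    # waited N.NNNs" and "... \"released\" ... held N.NNNs" lines like those above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Everything in this body runs while holding the "compute_resources"
        # lock; the wait and hold durations are what the log reports.
        return {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}

    if __name__ == '__main__':
        print(instance_claim('4b512941-2180-44a7-a69d-b54e57856cb0'))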
[ 1251.065944] env[61573]: DEBUG oslo_concurrency.lockutils [None req-95df3961-cadf-4dad-84d0-52ec359279d0 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.066897] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 117.346s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.067384] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0d91230d-849f-4e64-8685-5298ee5ea5b1] During sync_power_state the instance has a pending task (deleting). Skip. [ 1251.067602] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "0d91230d-849f-4e64-8685-5298ee5ea5b1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.283727] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba89e42d-57c8-4b0c-b7d5-94bbac920531 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.292551] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ff759f-57a3-417a-8b16-1f96798ad2a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.323439] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51066411-3b48-4e21-a39d-f7d6db58937e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.333026] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63dd676-9668-4328-839e-4837a72ff41b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.348753] env[61573]: DEBUG nova.compute.provider_tree [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.358780] env[61573]: DEBUG nova.scheduler.client.report [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1251.373914] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.422s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.374337] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1251.410780] env[61573]: DEBUG nova.compute.utils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1251.412761] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1251.412761] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1251.427022] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1251.497493] env[61573]: DEBUG nova.policy [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47f43ce2fa4a4c8190358ef608c7e52a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de5ad5b083124bcab1da2e5a2ae152c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1251.506056] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1251.537176] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1251.537472] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1251.537682] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1251.537928] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1251.538104] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1251.538261] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1251.538478] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1251.538641] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1251.538830] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1251.539009] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1251.539197] env[61573]: DEBUG nova.virt.hardware [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1251.540139] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a278b31e-142d-4913-b629-5932963200e3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.551762] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02afb914-fb16-4c7f-84aa-1b8ac26d0b7b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.027207] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Successfully created port: f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.747730] env[61573]: DEBUG nova.compute.manager [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Received event network-vif-plugged-f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1252.747953] env[61573]: DEBUG oslo_concurrency.lockutils [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] Acquiring lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.748185] env[61573]: DEBUG oslo_concurrency.lockutils [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] Lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.748354] env[61573]: DEBUG oslo_concurrency.lockutils [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] Lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.748520] env[61573]: DEBUG nova.compute.manager [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] No waiting events found dispatching network-vif-plugged-f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.748684] env[61573]: WARNING nova.compute.manager [req-8c0f01b4-d469-4ab6-854f-99d8ec099f4f req-5671bc0d-86fd-4caa-9c1a-ef816beacd04 service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Received unexpected event network-vif-plugged-f9db95cf-0e25-4225-aa24-182fe7062afe for instance with vm_state building and task_state spawning. [ 1252.876314] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Successfully updated port: f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.888425] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.888601] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.888766] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1252.956130] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1253.201062] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Updating instance_info_cache with network_info: [{"id": "f9db95cf-0e25-4225-aa24-182fe7062afe", "address": "fa:16:3e:76:e9:a8", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9db95cf-0e", "ovs_interfaceid": "f9db95cf-0e25-4225-aa24-182fe7062afe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.212207] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.212513] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance network_info: |[{"id": "f9db95cf-0e25-4225-aa24-182fe7062afe", "address": "fa:16:3e:76:e9:a8", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9db95cf-0e", "ovs_interfaceid": "f9db95cf-0e25-4225-aa24-182fe7062afe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1253.212925] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:e9:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9db95cf-0e25-4225-aa24-182fe7062afe', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.221805] env[61573]: DEBUG oslo.service.loopingcall [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1253.221805] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1253.221960] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a74c8015-d6ec-4de3-afc4-d2e7edbf9730 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.242917] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.242917] env[61573]: value = "task-4836839" [ 1253.242917] env[61573]: _type = "Task" [ 1253.242917] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.251768] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836839, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.754332] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836839, 'name': CreateVM_Task, 'duration_secs': 0.305179} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.754513] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1253.755206] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.755384] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.755713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1253.755988] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e900680d-3161-4e9e-a4cc-a9d8296c5887 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.761544] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1253.761544] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]527f6732-02f0-2db1-f1d7-be4c9ef4570a" [ 1253.761544] env[61573]: _type = "Task" [ 1253.761544] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.770724] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]527f6732-02f0-2db1-f1d7-be4c9ef4570a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.273601] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.274022] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.274097] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.780438] env[61573]: DEBUG nova.compute.manager [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Received event network-changed-f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1254.780650] env[61573]: DEBUG nova.compute.manager [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Refreshing instance network info cache due to event network-changed-f9db95cf-0e25-4225-aa24-182fe7062afe. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1254.780841] env[61573]: DEBUG oslo_concurrency.lockutils [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] Acquiring lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.780983] env[61573]: DEBUG oslo_concurrency.lockutils [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] Acquired lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.781166] env[61573]: DEBUG nova.network.neutron [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Refreshing network info cache for port f9db95cf-0e25-4225-aa24-182fe7062afe {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1255.130740] env[61573]: DEBUG nova.network.neutron [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Updated VIF entry in instance network info cache for port f9db95cf-0e25-4225-aa24-182fe7062afe. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1255.131127] env[61573]: DEBUG nova.network.neutron [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Updating instance_info_cache with network_info: [{"id": "f9db95cf-0e25-4225-aa24-182fe7062afe", "address": "fa:16:3e:76:e9:a8", "network": {"id": "4884c726-4c32-4eb5-8f19-7ce926c511ad", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1948393391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de5ad5b083124bcab1da2e5a2ae152c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9db95cf-0e", "ovs_interfaceid": "f9db95cf-0e25-4225-aa24-182fe7062afe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.143022] env[61573]: DEBUG oslo_concurrency.lockutils [req-f9e17c6f-94ce-44f5-9e39-13481d91614d req-8133bd9a-6e67-4669-a007-97f9c98e370d service nova] Releasing lock "refresh_cache-4b512941-2180-44a7-a69d-b54e57856cb0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.524889] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "4b512941-2180-44a7-a69d-b54e57856cb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.403396] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.404296] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.404613] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.404686] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1288.403786] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.404094] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.417047] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.417351] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.417500] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.417641] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1288.418837] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff87b4a-b9e4-4e0a-a90d-88fb221c3840 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.428647] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea03d403-e213-4814-b7c5-40995b8e8ed7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.442919] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f29134-00a6-49d4-beac-eb4b10a64641 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.449619] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c63596-5148-4126-b737-2cefdc8c48e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.479517] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180543MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1288.479727] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.479869] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.561204] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 36a5ff6d-5123-4323-8e86-3529828af0ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561374] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561515] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561625] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561741] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561854] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.561967] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.562090] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.562200] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.562309] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1288.573659] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.584586] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.594750] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 92145176-a567-4098-a9d4-f74a9316e38c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.605423] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1316abcb-b4b8-4e7d-858e-55a4db29b429 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.615649] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.628356] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e045b957-1670-4277-a9e3-d4ce1fb24047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.639158] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 89d2c816-dcac-41fd-b760-d93348dbbebd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1288.639418] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1288.639587] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '58', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_27a816079ac04dabaf85dec4005df607': '1', 'io_workload': '10', 'num_proj_09c0dcd38fb64257ba2c08c59f75f097': '1', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1288.849027] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f18d43-5c62-4e82-9061-aa8a2b8ded0c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.856851] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbb05cb-01ec-4aec-ac2d-d4fcdef7dff9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.886106] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f440d02-8227-4199-b939-417331b6059d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.893611] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f799a00a-16b7-4f04-987f-a6d4a903ff6e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.908023] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.916356] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1288.931266] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1288.931452] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.452s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.927151] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.927621] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.927662] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1289.927767] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1289.949159] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949310] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949423] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949549] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949668] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949785] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.949900] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.950028] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.950146] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.950261] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1289.950375] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1289.950831] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.951045] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.258406] env[61573]: WARNING oslo_vmware.rw_handles [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1296.258406] env[61573]: ERROR oslo_vmware.rw_handles [ 1296.259029] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1296.260856] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1296.261131] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Copying Virtual Disk [datastore2] vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] 
vmware_temp/35f13652-99e8-4266-ad29-130441c7bf72/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1296.261443] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f44b1de7-ca45-4c04-a882-965454ef3573 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.270186] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for the task: (returnval){ [ 1296.270186] env[61573]: value = "task-4836840" [ 1296.270186] env[61573]: _type = "Task" [ 1296.270186] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.278717] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Task: {'id': task-4836840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.781010] env[61573]: DEBUG oslo_vmware.exceptions [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1296.781325] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.781885] env[61573]: ERROR nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.781885] env[61573]: Faults: ['InvalidArgument'] [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Traceback (most recent call last): [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] yield resources [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self.driver.spawn(context, instance, image_meta, [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1296.781885] env[61573]: ERROR 
nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self._fetch_image_if_missing(context, vi) [ 1296.781885] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] image_cache(vi, tmp_image_ds_loc) [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] vm_util.copy_virtual_disk( [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] session._wait_for_task(vmdk_copy_task) [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return self.wait_for_task(task_ref) [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return evt.wait() [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] result = hub.switch() [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1296.782196] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return self.greenlet.switch() [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self.f(*self.args, **self.kw) [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] raise exceptions.translate_fault(task_info.error) [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Faults: ['InvalidArgument'] [ 1296.782550] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] [ 1296.782550] env[61573]: INFO nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Terminating instance [ 1296.783923] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.784093] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.784429] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84ad1af0-4839-43de-b1cb-d97ad3ad58be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.786874] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1296.787094] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1296.788032] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385f6c41-ef95-4cfb-ba6a-48dfe9be2850 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.797218] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1296.797218] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfca00a7-48b9-43ff-b9ce-1c8e7c80d29b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.798733] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.798733] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1296.799406] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b911812-7e4a-4178-b480-883c9bc4c48b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.804760] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for the task: (returnval){ [ 1296.804760] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cb8edc-9315-860a-5423-ed26c8be098b" [ 1296.804760] env[61573]: _type = "Task" [ 1296.804760] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.813167] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cb8edc-9315-860a-5423-ed26c8be098b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.866873] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1296.867043] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1296.868062] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Deleting the datastore file [datastore2] 36a5ff6d-5123-4323-8e86-3529828af0ab {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.868062] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e94b9a60-8f54-48d3-982f-d225e710f83d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.874515] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for the task: (returnval){ [ 1296.874515] env[61573]: value = "task-4836842" [ 1296.874515] env[61573]: _type = "Task" [ 1296.874515] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.882875] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Task: {'id': task-4836842, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.315654] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1297.316178] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Creating directory with path [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.316291] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b1b21e5-50d8-4b1f-8a4d-ffdf78a8d008 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.329121] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Created directory with path [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.329334] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Fetch image to [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1297.329506] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1297.330320] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56ab0fb-6c0f-4c8f-b53f-40d490128082 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.337877] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8962c3-7f1e-4d49-b4e6-fd4ff8b7df48 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.347113] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec765f5a-23e6-45b7-b38f-6bf565c908cb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.381530] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc5bbb4-d422-4b66-a6a7-9f44ffcb2f1b {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.389780] env[61573]: DEBUG oslo_vmware.api [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Task: {'id': task-4836842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077359} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.391817] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1297.392062] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1297.392252] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1297.392437] env[61573]: INFO nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Took 0.61 seconds to destroy the instance on the hypervisor. 
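The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow the same pattern: a vCenter task is submitted, its progress is polled ("progress is 0%"), and it either completes with a duration_secs value or ends in a fault that is translated into an exception. The snippet below is a minimal, hypothetical sketch of that polling loop only; it is not the oslo.vmware implementation, and the names `poll_task` and `FakeTaskError` are invented for illustration.

```python
# Hypothetical sketch of the task-polling pattern traced in the log above.
# NOT the oslo.vmware code: `poll_task`/`FakeTaskError` are illustrative names.
import time


class FakeTaskError(Exception):
    """Stands in for a translated task fault (e.g. InvalidArgument)."""


def poll_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, errors out, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors the "raise exceptions.translate_fault(...)" frame in the traceback.
            raise FakeTaskError(info.get('error', 'task failed'))
        # Between polls the log shows repeated "progress is N%" lines.
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')
```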
[ 1297.394918] env[61573]: DEBUG nova.compute.claims [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1297.395109] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.395339] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.398339] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2b70d152-ca68-43c7-b1c4-8a8faf46166f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.424350] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1297.488103] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1297.549859] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1297.550076] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1297.734348] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c538be5f-0226-4d40-b347-730a4d4e5777 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.743037] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fd031c-7526-4860-ba81-defb323e69a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.774687] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b234fc1-31e0-4650-83e4-53e249cbbc80 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.783221] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53c3e39-833a-482b-a502-796db2407632 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.797062] env[61573]: DEBUG nova.compute.provider_tree [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.806167] env[61573]: DEBUG nova.scheduler.client.report [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1297.820494] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.425s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.821038] env[61573]: ERROR nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.821038] env[61573]: Faults: ['InvalidArgument'] [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Traceback (most recent call last): [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1297.821038] 
env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self.driver.spawn(context, instance, image_meta, [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self._fetch_image_if_missing(context, vi) [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] image_cache(vi, tmp_image_ds_loc) [ 1297.821038] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] vm_util.copy_virtual_disk( [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] session._wait_for_task(vmdk_copy_task) [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return self.wait_for_task(task_ref) [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return evt.wait() [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] result = hub.switch() [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] return self.greenlet.switch() [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1297.821494] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] self.f(*self.args, **self.kw) [ 1297.822021] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1297.822021] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] raise exceptions.translate_fault(task_info.error) [ 1297.822021] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.822021] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Faults: ['InvalidArgument'] [ 1297.822021] env[61573]: ERROR nova.compute.manager [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] [ 1297.822021] env[61573]: DEBUG nova.compute.utils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1297.823497] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Build of instance 36a5ff6d-5123-4323-8e86-3529828af0ab was re-scheduled: A specified parameter was not correct: fileType [ 1297.823497] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1297.823879] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1297.824065] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1297.824226] env[61573]: DEBUG nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1297.824387] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1298.276066] env[61573]: DEBUG nova.network.neutron [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.289708] env[61573]: INFO nova.compute.manager [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Took 0.47 seconds to deallocate network for instance. [ 1298.423052] env[61573]: INFO nova.scheduler.client.report [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Deleted allocations for instance 36a5ff6d-5123-4323-8e86-3529828af0ab [ 1298.450395] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ef851f25-0099-410c-97b7-2c6702eb5e3f tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.664s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.451700] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 386.082s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.451977] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Acquiring lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.452197] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.452374] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.454478] env[61573]: INFO nova.compute.manager [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Terminating instance [ 1298.456344] env[61573]: DEBUG nova.compute.manager [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1298.456609] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1298.457127] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03ca7e8b-11d0-489c-8738-0e2142b6fab3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.467333] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fccd739-1684-4cfb-8463-0c95f052ec4e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.478673] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1298.504750] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 36a5ff6d-5123-4323-8e86-3529828af0ab could not be found. 
[ 1298.504935] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1298.505296] env[61573]: INFO nova.compute.manager [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1298.505420] env[61573]: DEBUG oslo.service.loopingcall [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1298.505635] env[61573]: DEBUG nova.compute.manager [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1298.505733] env[61573]: DEBUG nova.network.neutron [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1298.530528] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.530779] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.532435] env[61573]: INFO nova.compute.claims [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.535613] env[61573]: DEBUG nova.network.neutron [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.542969] env[61573]: INFO nova.compute.manager [-] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] Took 0.04 seconds to deallocate network for instance. 
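The claim and lock entries here feed the totals reported in the "Final resource view" line earlier in this section: ten 128MB/1GB/1vCPU instances plus the 512MB reserved memory give used_ram=1792MB, used_disk=10GB, used_vcpus=10. The sketch below just reproduces that arithmetic under those assumptions; it is a simplified stand-in, not the nova ResourceTracker logic.

```python
# Simplified sketch of the usage arithmetic behind the "Final resource view"
# line above; assumption-laden, not the nova ResourceTracker implementation.
RESERVED_MEMORY_MB = 512  # matches the MEMORY_MB 'reserved' value in the inventory data


def summarize(instances):
    """instances: list of dicts with 'memory_mb', 'root_gb', 'vcpus'."""
    used_ram = RESERVED_MEMORY_MB + sum(i['memory_mb'] for i in instances)
    used_disk = sum(i['root_gb'] for i in instances)
    used_vcpus = sum(i['vcpus'] for i in instances)
    return used_ram, used_disk, used_vcpus


# Ten 128MB / 1GB / 1vCPU instances reproduce the figures in the log:
flavors = [{'memory_mb': 128, 'root_gb': 1, 'vcpus': 1}] * 10
assert summarize(flavors) == (1792, 10, 10)
```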
[ 1298.672515] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7c24a8ac-eabd-4d49-8d00-1d6e0d8998fb tempest-ListImageFiltersTestJSON-380092424 tempest-ListImageFiltersTestJSON-380092424-project-member] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.673629] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 164.953s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.673629] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 36a5ff6d-5123-4323-8e86-3529828af0ab] During sync_power_state the instance has a pending task (deleting). Skip. [ 1298.673912] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "36a5ff6d-5123-4323-8e86-3529828af0ab" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.835591] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fd7e0a-9217-46a3-b349-6b62f3fa0c2d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.843751] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154db116-2f4a-4b27-a9b3-e8d84653d4b9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.873753] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b78e426-5337-47bc-9aa6-b29205929b20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.881867] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f7074-6118-4c56-a92e-fc5d65db84fc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.895865] env[61573]: DEBUG nova.compute.provider_tree [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.904768] env[61573]: DEBUG nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 
0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1298.919063] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.919535] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1298.956047] env[61573]: DEBUG nova.compute.utils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1298.957068] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1298.957247] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1298.967202] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1299.022223] env[61573]: DEBUG nova.policy [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '161622463606455fa04c1bac29a04eb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6cc1ffdc2dc45ad85b0be67c4c8b6c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1299.042676] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1299.071870] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1299.072542] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1299.072542] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1299.072542] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1299.072722] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1299.072722] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1299.073150] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1299.073150] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1299.073373] env[61573]: DEBUG 
nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1299.073578] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1299.073757] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1299.074903] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5131ce6d-c4a8-4053-96a2-a189d62e98ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.083183] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40646d7-a806-4820-905f-b2c7a7613fd4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.646293] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Successfully created port: 90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1300.494510] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Successfully updated port: 90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1300.514598] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.515994] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.516227] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1300.537512] env[61573]: DEBUG nova.compute.manager [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 
req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Received event network-vif-plugged-90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1300.537512] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Acquiring lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.537512] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.537512] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.537729] env[61573]: DEBUG nova.compute.manager [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] No waiting events found dispatching network-vif-plugged-90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1300.537729] env[61573]: WARNING nova.compute.manager [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Received unexpected event network-vif-plugged-90975e85-81c1-4d13-bee8-f7fbc9aff8a1 for instance with vm_state building and task_state spawning. [ 1300.537729] env[61573]: DEBUG nova.compute.manager [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Received event network-changed-90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1300.537729] env[61573]: DEBUG nova.compute.manager [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Refreshing instance network info cache due to event network-changed-90975e85-81c1-4d13-bee8-f7fbc9aff8a1. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1300.537729] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Acquiring lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.587674] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1300.853368] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Updating instance_info_cache with network_info: [{"id": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "address": "fa:16:3e:63:71:60", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90975e85-81", "ovs_interfaceid": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.865802] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.866130] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance network_info: |[{"id": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "address": "fa:16:3e:63:71:60", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90975e85-81", "ovs_interfaceid": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1300.866446] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Acquired lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.866713] env[61573]: DEBUG nova.network.neutron [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Refreshing network info cache for port 90975e85-81c1-4d13-bee8-f7fbc9aff8a1 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1300.867914] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:71:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90975e85-81c1-4d13-bee8-f7fbc9aff8a1', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.876797] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating folder: Project (c6cc1ffdc2dc45ad85b0be67c4c8b6c1). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1300.880019] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab04d7a2-dff6-4096-b60a-8b04ea17a690 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.893440] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created folder: Project (c6cc1ffdc2dc45ad85b0be67c4c8b6c1) in parent group-v942801. [ 1300.893440] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating folder: Instances. Parent ref: group-v942880. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1300.893666] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28c76db9-dbc3-4a07-b66d-18f232a9fc47 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.909508] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created folder: Instances in parent group-v942880. [ 1300.909761] env[61573]: DEBUG oslo.service.loopingcall [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.910030] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1300.910249] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0aa5e62e-184d-4fab-ae3a-4fe59424b0ac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.932494] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.932494] env[61573]: value = "task-4836845" [ 1300.932494] env[61573]: _type = "Task" [ 1300.932494] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.941514] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836845, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.208852] env[61573]: DEBUG nova.network.neutron [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Updated VIF entry in instance network info cache for port 90975e85-81c1-4d13-bee8-f7fbc9aff8a1. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1301.209287] env[61573]: DEBUG nova.network.neutron [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Updating instance_info_cache with network_info: [{"id": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "address": "fa:16:3e:63:71:60", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90975e85-81", "ovs_interfaceid": "90975e85-81c1-4d13-bee8-f7fbc9aff8a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.220368] env[61573]: DEBUG oslo_concurrency.lockutils [req-abee0453-b9f4-48f1-b156-2cee43c80fc0 req-f6ebb3ad-7747-48fd-a870-aa46da2cc1b4 service nova] Releasing lock "refresh_cache-a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.443682] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836845, 'name': CreateVM_Task, 'duration_secs': 0.280195} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.443786] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1301.444543] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.444696] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.445040] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1301.445305] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4548576-7e53-495d-8435-30dd8d5c4d8e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.450343] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1301.450343] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5232a90b-afb0-626d-9ece-6fd61aadebc6" [ 1301.450343] env[61573]: _type = "Task" [ 1301.450343] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.458894] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5232a90b-afb0-626d-9ece-6fd61aadebc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.961117] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.961432] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.961598] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.367853] env[61573]: WARNING oslo_vmware.rw_handles [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1343.367853] env[61573]: ERROR oslo_vmware.rw_handles [ 1343.368506] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1343.370283] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 
tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1343.370519] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Copying Virtual Disk [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/e2e51076-69ee-4eb2-b470-bb8a7f33ded0/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1343.370815] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac2aa3fe-259d-4857-a839-d529a0792144 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.379312] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for the task: (returnval){ [ 1343.379312] env[61573]: value = "task-4836846" [ 1343.379312] env[61573]: _type = "Task" [ 1343.379312] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.387225] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Task: {'id': task-4836846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.422923] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.889613] env[61573]: DEBUG oslo_vmware.exceptions [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1343.889843] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.890434] env[61573]: ERROR nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1343.890434] env[61573]: Faults: ['InvalidArgument'] [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Traceback (most recent call last): [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] yield resources [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self.driver.spawn(context, instance, image_meta, [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self._fetch_image_if_missing(context, vi) [ 1343.890434] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] image_cache(vi, tmp_image_ds_loc) [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] vm_util.copy_virtual_disk( [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] session._wait_for_task(vmdk_copy_task) [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return self.wait_for_task(task_ref) [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return evt.wait() [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] result = hub.switch() [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1343.890764] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return self.greenlet.switch() [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self.f(*self.args, **self.kw) [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] raise exceptions.translate_fault(task_info.error) [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Faults: ['InvalidArgument'] [ 1343.891062] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] [ 1343.891062] env[61573]: INFO nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Terminating instance [ 1343.892326] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.892573] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1343.892827] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8120c8f7-6eee-4e3a-ac5a-06f70c8f0364 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.895173] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.895417] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.895599] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1343.903652] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1343.903839] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1343.904581] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31d3cb75-554c-4e49-bdf3-6dd5dcf2d9c8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.913747] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for the task: (returnval){ [ 1343.913747] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d884fe-870f-f571-9f57-41ed542982dd" [ 1343.913747] env[61573]: _type = "Task" [ 1343.913747] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.922468] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d884fe-870f-f571-9f57-41ed542982dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.931845] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1344.050737] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.059472] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Releasing lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.060277] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1344.060277] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1344.061497] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c1a0cb-5913-48be-a560-51c0b68b5044 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.070582] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1344.070691] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ee020d7-b90c-4d62-9077-322879551290 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.101048] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1344.101286] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1344.101475] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Deleting the datastore file [datastore2] ce7d9bf8-55ad-4bbc-a139-55ff85cda08f {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.101739] env[61573]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2468226-3988-4372-809f-3c277e5ded8b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.109902] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for the task: (returnval){ [ 1344.109902] env[61573]: value = "task-4836848" [ 1344.109902] env[61573]: _type = "Task" [ 1344.109902] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.118809] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Task: {'id': task-4836848, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.426025] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1344.426025] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Creating directory with path [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.426025] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c836f06d-aa51-4b99-940b-8c4c1eeb5064 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.437962] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Created directory with path [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.438198] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Fetch image to [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1344.438399] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1344.439163] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6b835a-6834-4bb6-9c3a-fcec06f6e4d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.446621] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd0f15b-dbc1-4231-817a-230684057424 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.456279] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87a89b1-746d-46cd-9bab-85c20e0c4ddb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.488954] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fee774e-4fe6-46e3-995b-ee7ae0088446 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.495399] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b962d0b-86fb-476c-b298-09445430b2dc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.515969] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1344.567792] env[61573]: DEBUG oslo_vmware.rw_handles [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1344.627459] env[61573]: DEBUG oslo_vmware.rw_handles [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1344.627688] env[61573]: DEBUG oslo_vmware.rw_handles [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1344.631722] env[61573]: DEBUG oslo_vmware.api [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Task: {'id': task-4836848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045729} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.631984] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.632212] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1344.632438] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1344.632621] env[61573]: INFO nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1344.632888] env[61573]: DEBUG oslo.service.loopingcall [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.633148] env[61573]: DEBUG nova.compute.manager [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1344.635442] env[61573]: DEBUG nova.compute.claims [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1344.635618] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.635857] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.884707] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f82784-cc4a-4f2a-9be3-0484c3f25f3e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.893100] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac7a612-1712-4630-9161-ca8c2d161f55 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.925610] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56978f4-e2d6-4c59-8401-f98ea2fa4aa9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.933203] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaeddad-e84c-401b-9942-bad02aa8796f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.947170] env[61573]: DEBUG nova.compute.provider_tree [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.956152] env[61573]: DEBUG nova.scheduler.client.report [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1344.970459] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 
tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.971007] env[61573]: ERROR nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1344.971007] env[61573]: Faults: ['InvalidArgument'] [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Traceback (most recent call last): [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self.driver.spawn(context, instance, image_meta, [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self._fetch_image_if_missing(context, vi) [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] image_cache(vi, tmp_image_ds_loc) [ 1344.971007] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] vm_util.copy_virtual_disk( [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] session._wait_for_task(vmdk_copy_task) [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return self.wait_for_task(task_ref) [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return evt.wait() [ 1344.971351] env[61573]: ERROR 
nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] result = hub.switch() [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] return self.greenlet.switch() [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1344.971351] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] self.f(*self.args, **self.kw) [ 1344.971679] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1344.971679] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] raise exceptions.translate_fault(task_info.error) [ 1344.971679] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1344.971679] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Faults: ['InvalidArgument'] [ 1344.971679] env[61573]: ERROR nova.compute.manager [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] [ 1344.971807] env[61573]: DEBUG nova.compute.utils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1344.973177] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Build of instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f was re-scheduled: A specified parameter was not correct: fileType [ 1344.973177] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1344.973563] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1344.973788] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.973933] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired lock 
"refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.974109] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1345.006418] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.116188] env[61573]: DEBUG nova.network.neutron [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.125990] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Releasing lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.126231] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1345.126410] env[61573]: DEBUG nova.compute.manager [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1345.235344] env[61573]: INFO nova.scheduler.client.report [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Deleted allocations for instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f [ 1345.259373] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dd943403-8bef-44da-86fb-da166f8fd056 tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 553.243s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.260736] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 351.840s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.261102] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.261339] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.261586] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.263856] env[61573]: INFO nova.compute.manager [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Terminating instance [ 1345.265575] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquiring lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.265772] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Acquired lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" 
{{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.265977] env[61573]: DEBUG nova.network.neutron [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1345.270925] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1345.293888] env[61573]: DEBUG nova.network.neutron [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.323689] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.323850] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.325428] env[61573]: INFO nova.compute.claims [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1345.383604] env[61573]: DEBUG nova.network.neutron [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.395324] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Releasing lock "refresh_cache-ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.398857] env[61573]: DEBUG nova.compute.manager [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1345.398857] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1345.398857] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d92a715-738a-4baf-b240-e3ca9973659a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.406403] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9050e378-122f-49fa-887e-050097744253 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.444240] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce7d9bf8-55ad-4bbc-a139-55ff85cda08f could not be found. [ 1345.444497] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1345.444630] env[61573]: INFO nova.compute.manager [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1345.444872] env[61573]: DEBUG oslo.service.loopingcall [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.447459] env[61573]: DEBUG nova.compute.manager [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1345.447567] env[61573]: DEBUG nova.network.neutron [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1345.466590] env[61573]: DEBUG nova.network.neutron [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.475045] env[61573]: DEBUG nova.network.neutron [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.484685] env[61573]: INFO nova.compute.manager [-] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] Took 0.04 seconds to deallocate network for instance. 
[ 1345.576255] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f64869a2-44b5-46b7-b475-3fa858e0799e tempest-ServerShowV247Test-355967977 tempest-ServerShowV247Test-355967977-project-member] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.315s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.577577] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 211.856s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.577577] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: ce7d9bf8-55ad-4bbc-a139-55ff85cda08f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1345.577577] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "ce7d9bf8-55ad-4bbc-a139-55ff85cda08f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.591319] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd95549-2cf5-4db3-953a-2b516b5c0ed9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.599052] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200949f7-984c-435d-9238-3c22cb886857 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.629216] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd626a2-0927-450b-ac71-45bbd2762e23 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.636617] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4465f15-fba4-4d6c-81a0-aae202b3dcd5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.649528] env[61573]: DEBUG nova.compute.provider_tree [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1345.658602] env[61573]: DEBUG nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 
'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1345.674065] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.350s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.674751] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1345.718287] env[61573]: DEBUG nova.compute.utils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1345.719816] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1345.720047] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1345.731139] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1345.788448] env[61573]: DEBUG nova.policy [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '161622463606455fa04c1bac29a04eb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6cc1ffdc2dc45ad85b0be67c4c8b6c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1345.796134] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1345.823991] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1345.824784] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1345.824870] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.825179] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1345.825369] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.825614] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1345.825850] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1345.826026] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1345.826199] env[61573]: DEBUG 
nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1345.826365] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1345.826538] env[61573]: DEBUG nova.virt.hardware [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1345.827450] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0ae385-8e8e-4e30-a270-c393e4749ef2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.836319] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47404810-617e-4d8d-850d-4ac208e9973d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.225048] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Successfully created port: c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.403672] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.872865] env[61573]: DEBUG nova.compute.manager [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Received event network-vif-plugged-c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1346.873153] env[61573]: DEBUG oslo_concurrency.lockutils [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] Acquiring lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.873323] env[61573]: DEBUG oslo_concurrency.lockutils [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.873494] env[61573]: DEBUG oslo_concurrency.lockutils [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] Lock 
"e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.873666] env[61573]: DEBUG nova.compute.manager [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] No waiting events found dispatching network-vif-plugged-c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1346.873835] env[61573]: WARNING nova.compute.manager [req-fdb05e97-bf7d-4783-a51e-e0b1eab4d9a3 req-0521c458-bf4e-4d2e-b4fc-e9f56d8b0aef service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Received unexpected event network-vif-plugged-c26dd644-828e-45b2-90c5-1d5380eb3e6a for instance with vm_state building and task_state spawning. [ 1346.958289] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Successfully updated port: c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1346.969962] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.969962] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.970368] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1347.017650] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1347.279601] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Updating instance_info_cache with network_info: [{"id": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "address": "fa:16:3e:98:b9:88", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26dd644-82", "ovs_interfaceid": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.303603] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.303929] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance network_info: |[{"id": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "address": "fa:16:3e:98:b9:88", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26dd644-82", "ovs_interfaceid": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1347.304373] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:b9:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c26dd644-828e-45b2-90c5-1d5380eb3e6a', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1347.311880] env[61573]: DEBUG oslo.service.loopingcall [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.312387] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1347.312614] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8133d38a-baf7-49cc-abc1-91eb480fa006 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.335149] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1347.335149] env[61573]: value = "task-4836849" [ 1347.335149] env[61573]: _type = "Task" [ 1347.335149] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.343552] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836849, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.846049] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836849, 'name': CreateVM_Task, 'duration_secs': 0.323421} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.846248] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1347.846842] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.847023] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.847399] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1347.847662] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7a58b4c-0365-474b-a3b3-d43c5c304f63 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.852585] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1347.852585] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525ddf37-8bdd-8135-6b54-a0b8f6e995b4" [ 1347.852585] env[61573]: _type = "Task" [ 1347.852585] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.860673] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525ddf37-8bdd-8135-6b54-a0b8f6e995b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.363967] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.364325] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1348.365051] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.403300] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.414786] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.415019] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.415218] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.415375] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1348.416688] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c283c8bb-ce0e-4479-b3c7-f2f8ee6794c9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.426040] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a4ed2f-d2e8-487c-bd92-b422327b8655 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.442012] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed2c6e1-f559-43e9-bfad-b3d6b69f7cbe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.449050] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c06b57f-5aa7-48f1-9396-c7611f497f64 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.480077] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180521MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1348.480372] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.480680] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.561954] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.562015] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.562386] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.562519] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.562767] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.563008] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.563269] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.563437] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.563618] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.563774] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1348.576691] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 92145176-a567-4098-a9d4-f74a9316e38c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1348.588017] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1316abcb-b4b8-4e7d-858e-55a4db29b429 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1348.599041] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1348.610153] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e045b957-1670-4277-a9e3-d4ce1fb24047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1348.621880] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 89d2c816-dcac-41fd-b760-d93348dbbebd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1348.622189] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1348.622367] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '60', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_c510d515c28b4abb9f0dcc76c0032b88': '1', 'io_workload': '10', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_task_spawning': '2', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1348.835215] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1a7b45-46f4-4962-a119-b4c617dbd617 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.843864] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a097e1-79f5-4a06-97ce-309015868f7a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.875506] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b51b30-dc9a-4695-981d-f533b4b6121b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.883433] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66047a8a-1f33-4f0f-bc10-508891730aa3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.897436] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.903754] env[61573]: DEBUG nova.compute.manager [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Received event network-changed-c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1348.903961] env[61573]: DEBUG nova.compute.manager [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Refreshing instance network info cache due to event network-changed-c26dd644-828e-45b2-90c5-1d5380eb3e6a. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1348.904237] env[61573]: DEBUG oslo_concurrency.lockutils [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] Acquiring lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.904498] env[61573]: DEBUG oslo_concurrency.lockutils [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] Acquired lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.904558] env[61573]: DEBUG nova.network.neutron [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Refreshing network info cache for port c26dd644-828e-45b2-90c5-1d5380eb3e6a {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1348.906753] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1348.920170] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1348.920170] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.439s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.165703] env[61573]: DEBUG nova.network.neutron [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Updated VIF entry in instance network info cache for port c26dd644-828e-45b2-90c5-1d5380eb3e6a. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1349.166021] env[61573]: DEBUG nova.network.neutron [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Updating instance_info_cache with network_info: [{"id": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "address": "fa:16:3e:98:b9:88", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26dd644-82", "ovs_interfaceid": "c26dd644-828e-45b2-90c5-1d5380eb3e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.176478] env[61573]: DEBUG oslo_concurrency.lockutils [req-a6e665b9-e363-4878-a341-4b22b0038f8d req-c111579e-e1e0-4c65-9642-2aeef58632a4 service nova] Releasing lock "refresh_cache-e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.915543] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.915900] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.915948] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1349.916081] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1349.939822] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940027] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940140] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940265] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940385] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940505] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940619] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940733] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940847] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.940959] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1349.941086] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1349.941572] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.941765] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.941914] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.942075] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.942230] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.942363] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1354.560227] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.632226] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.363220] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.363530] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.756652] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.756652] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.326246] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d272f481-f590-46e9-9f51-0c7601ff34ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.326615] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.714763] env[61573]: WARNING oslo_vmware.rw_handles [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without 
response [ 1391.714763] env[61573]: ERROR oslo_vmware.rw_handles [ 1391.714763] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1391.716737] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1391.716996] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Copying Virtual Disk [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/52338e00-c392-4895-b874-b258ebb50f93/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1391.717310] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-006f0425-7a84-418f-a20c-c5fabe77bf22 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.727057] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for the task: (returnval){ [ 1391.727057] env[61573]: value = "task-4836850" [ 1391.727057] env[61573]: _type = "Task" [ 1391.727057] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.735874] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Task: {'id': task-4836850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.238009] env[61573]: DEBUG oslo_vmware.exceptions [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1392.238304] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.238911] env[61573]: ERROR nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1392.238911] env[61573]: Faults: ['InvalidArgument'] [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Traceback (most recent call last): [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] yield resources [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self.driver.spawn(context, instance, image_meta, [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self._fetch_image_if_missing(context, vi) [ 1392.238911] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] image_cache(vi, tmp_image_ds_loc) [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] vm_util.copy_virtual_disk( [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] session._wait_for_task(vmdk_copy_task) [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return self.wait_for_task(task_ref) [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return evt.wait() [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] result = hub.switch() [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1392.239344] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return self.greenlet.switch() [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self.f(*self.args, **self.kw) [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] raise exceptions.translate_fault(task_info.error) [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Faults: ['InvalidArgument'] [ 1392.239775] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] [ 1392.239775] env[61573]: INFO nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Terminating instance [ 1392.240829] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.241054] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.241300] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5609cb31-a887-499b-9684-8102940c8c8c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.244211] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1392.244450] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1392.245195] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c4e0ff-a9e6-4484-96f9-84d5c79b4ea5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.252379] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1392.252692] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fc4b62a-c0a0-4641-bb1e-27f6e045f90f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.254928] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.255113] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1392.256159] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-044eb8a0-12d4-45c9-b0de-d0846870f601 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.260929] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for the task: (returnval){ [ 1392.260929] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52ad5495-c433-2323-a737-f6c18bcafa7f" [ 1392.260929] env[61573]: _type = "Task" [ 1392.260929] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.269152] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52ad5495-c433-2323-a737-f6c18bcafa7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.324684] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1392.324891] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1392.325096] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Deleting the datastore file [datastore2] 54860ec5-a1ff-4d7d-ae70-769f8fad731b {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1392.325373] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4321e792-de24-421b-995a-dd30d9966c1f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.332298] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for the task: (returnval){ [ 1392.332298] env[61573]: value = "task-4836852" [ 1392.332298] env[61573]: _type = "Task" [ 1392.332298] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.341555] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Task: {'id': task-4836852, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.771245] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1392.771573] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Creating directory with path [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.771760] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3df5f73d-86bb-443d-898d-197aef8ad42c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.784010] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Created directory with path [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.784217] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Fetch image to [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1392.784380] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1392.785176] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fb124f-f3e1-4769-8c60-60c3d2ef7b4f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.791987] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c86a886-ed21-46a1-9615-dbc4c5ef3038 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.801471] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83f5a72-0cd5-493b-b6b0-29a566845027 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.837098] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bd33e5-6211-45eb-b882-e90732dcd15d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.846746] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1d3e74d2-9acf-417c-8eff-5f7db9a1b8fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.848560] env[61573]: DEBUG oslo_vmware.api [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Task: {'id': task-4836852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075585} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.848822] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.849015] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1392.849194] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1392.849363] env[61573]: INFO nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Took 0.60 seconds to destroy the instance on the hypervisor. 
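
Annotation: the error traceback above comes from oslo.vmware's task poller. The image copy (CopyVirtualDisk_Task) ends in an error state on vCenter, and the poller re-raises the translated fault as VimFaultException ("A specified parameter was not correct: fileType"), which is what aborts the spawn and triggers the teardown just logged. Below is a minimal, self-contained sketch of that poll-and-translate loop only; TaskInfo, get_task_info and VimFault are illustrative stand-ins and not the real oslo.vmware API.

    # Sketch of a "wait for vCenter task" loop like the one in the traceback
    # above (poll task info, raise on error state). All names here are
    # illustrative stand-ins, not the real oslo.vmware implementation.
    import time
    from dataclasses import dataclass, field


    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list


    @dataclass
    class TaskInfo:
        state: str                     # 'queued' | 'running' | 'success' | 'error'
        error_msg: str = ""
        faults: list = field(default_factory=list)


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll until the task finishes; translate an error state into an exception."""
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # Analogous to raise exceptions.translate_fault(task_info.error)
                raise VimFault(info.error_msg, info.faults)
            time.sleep(poll_interval)


    if __name__ == "__main__":
        # Simulate the CopyVirtualDisk_Task failure seen in the log above.
        states = iter([
            TaskInfo("running"),
            TaskInfo("error",
                     "A specified parameter was not correct: fileType",
                     ["InvalidArgument"]),
        ])
        try:
            wait_for_task(lambda: next(states), poll_interval=0)
        except VimFault as exc:
            print(f"spawn failed: {exc} faults={exc.fault_list}")

In the real code path the exception propagates out of _cache_sparse_image and _fetch_image_if_missing, which is why the compute manager aborts the resource claim and re-schedules the build in the entries that follow.
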
[ 1392.851796] env[61573]: DEBUG nova.compute.claims [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1392.851973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.852218] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.870762] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1392.924123] env[61573]: DEBUG oslo_vmware.rw_handles [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1392.984570] env[61573]: DEBUG oslo_vmware.rw_handles [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1392.984740] env[61573]: DEBUG oslo_vmware.rw_handles [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1393.167603] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5f50e3-0403-48c9-9841-c671ebbdff21 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.175603] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966880df-4091-47e9-b5f7-4f4bef8f8193 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.208278] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160ffbca-e84d-4046-99a0-781b3f66d225 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.216582] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc4936b-99cb-4075-8858-aa9b424609a6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.230268] env[61573]: DEBUG nova.compute.provider_tree [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.238663] env[61573]: DEBUG nova.scheduler.client.report [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1393.254029] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.402s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.254593] env[61573]: ERROR nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1393.254593] env[61573]: Faults: ['InvalidArgument'] [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Traceback (most recent call last): [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1393.254593] 
env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self.driver.spawn(context, instance, image_meta, [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self._fetch_image_if_missing(context, vi) [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] image_cache(vi, tmp_image_ds_loc) [ 1393.254593] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] vm_util.copy_virtual_disk( [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] session._wait_for_task(vmdk_copy_task) [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return self.wait_for_task(task_ref) [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return evt.wait() [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] result = hub.switch() [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] return self.greenlet.switch() [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1393.254880] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] self.f(*self.args, **self.kw) [ 1393.255316] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1393.255316] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] raise exceptions.translate_fault(task_info.error) [ 1393.255316] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1393.255316] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Faults: ['InvalidArgument'] [ 1393.255316] env[61573]: ERROR nova.compute.manager [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] [ 1393.255316] env[61573]: DEBUG nova.compute.utils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1393.256773] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Build of instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b was re-scheduled: A specified parameter was not correct: fileType [ 1393.256773] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1393.257234] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1393.257407] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1393.257574] env[61573]: DEBUG nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1393.257779] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1393.595677] env[61573]: DEBUG nova.network.neutron [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.608398] env[61573]: INFO nova.compute.manager [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Took 0.35 seconds to deallocate network for instance. [ 1393.711330] env[61573]: INFO nova.scheduler.client.report [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Deleted allocations for instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b [ 1393.744414] env[61573]: DEBUG oslo_concurrency.lockutils [None req-88f5bdee-0396-40d8-86de-a30a6d24acdf tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.028s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.746071] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 371.288s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.746933] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.747289] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.747482] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.749855] env[61573]: INFO nova.compute.manager [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Terminating instance [ 1393.753856] env[61573]: DEBUG nova.compute.manager [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1393.754063] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1393.754343] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28f4de43-8845-4395-93b1-8e8e433383d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.767193] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6b9530-c64b-49ee-bac8-f05ce22b811c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.786139] env[61573]: DEBUG nova.compute.manager [None req-396dee37-420e-47da-8c75-dfbd5bcede63 tempest-ServerDiagnosticsNegativeTest-341467014 tempest-ServerDiagnosticsNegativeTest-341467014-project-member] [instance: 92145176-a567-4098-a9d4-f74a9316e38c] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1393.807318] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54860ec5-a1ff-4d7d-ae70-769f8fad731b could not be found. 
[ 1393.807507] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1393.807706] env[61573]: INFO nova.compute.manager [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1393.807940] env[61573]: DEBUG oslo.service.loopingcall [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.808248] env[61573]: DEBUG nova.compute.manager [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1393.808337] env[61573]: DEBUG nova.network.neutron [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1393.816446] env[61573]: DEBUG nova.compute.manager [None req-396dee37-420e-47da-8c75-dfbd5bcede63 tempest-ServerDiagnosticsNegativeTest-341467014 tempest-ServerDiagnosticsNegativeTest-341467014-project-member] [instance: 92145176-a567-4098-a9d4-f74a9316e38c] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1393.836881] env[61573]: DEBUG nova.network.neutron [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.844367] env[61573]: INFO nova.compute.manager [-] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] Took 0.04 seconds to deallocate network for instance. [ 1393.852058] env[61573]: DEBUG oslo_concurrency.lockutils [None req-396dee37-420e-47da-8c75-dfbd5bcede63 tempest-ServerDiagnosticsNegativeTest-341467014 tempest-ServerDiagnosticsNegativeTest-341467014-project-member] Lock "92145176-a567-4098-a9d4-f74a9316e38c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.009s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.863412] env[61573]: DEBUG nova.compute.manager [None req-71c1c702-cab9-452e-bb8b-6ff6963c1815 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: 1316abcb-b4b8-4e7d-858e-55a4db29b429] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1393.898390] env[61573]: DEBUG nova.compute.manager [None req-71c1c702-cab9-452e-bb8b-6ff6963c1815 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: 1316abcb-b4b8-4e7d-858e-55a4db29b429] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1393.920031] env[61573]: DEBUG oslo_concurrency.lockutils [None req-71c1c702-cab9-452e-bb8b-6ff6963c1815 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "1316abcb-b4b8-4e7d-858e-55a4db29b429" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.508s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.929848] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1393.965621] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9a1960e8-a15e-4315-885f-fddb80706602 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.966102] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 260.245s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.966338] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 54860ec5-a1ff-4d7d-ae70-769f8fad731b] During sync_power_state the instance has a pending task (deleting). Skip. 
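The sync_power_state entry just above skips the instance because a delete task is still in flight. The guard that behaviour suggests is roughly the following; Instance and sync_power_state here are simplified, hypothetical stand-ins used for illustration, not Nova's classes:

# Sketch of the "pending task -> skip" guard implied by the log line above.
from dataclasses import dataclass
from typing import Optional

@dataclass
class Instance:
    uuid: str
    task_state: Optional[str]  # e.g. 'deleting' while a delete is in progress
    power_state: str = 'running'

def sync_power_state(instance: Instance, driver_power_state: str) -> None:
    if instance.task_state is not None:
        # A lifecycle operation owns the instance right now; syncing power state
        # underneath it could race with the delete/build in progress.
        print(f"{instance.uuid}: pending task ({instance.task_state}), skip")
        return
    instance.power_state = driver_power_state

sync_power_state(Instance('54860ec5', task_state='deleting'), 'shutdown')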
[ 1393.966523] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "54860ec5-a1ff-4d7d-ae70-769f8fad731b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.997096] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.997096] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.997269] env[61573]: INFO nova.compute.claims [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.237772] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf6f4d5-730b-46c3-9b39-fa6f167b507e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.245332] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be3a0d2-0ac7-4566-a008-7bfca902e7c3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.276240] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418477a1-c154-4f50-9388-72b2d3b3c1a9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.284324] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957da19e-e898-4d53-aea8-47a53f4b912e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.300295] env[61573]: DEBUG nova.compute.provider_tree [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.309386] env[61573]: DEBUG nova.scheduler.client.report [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1394.326171] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.326681] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1394.359964] env[61573]: DEBUG nova.compute.utils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.361479] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1394.361657] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1394.372215] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1394.426137] env[61573]: DEBUG nova.policy [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eef8f4d818540a88b1f91d0ce7af6fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5700bfb489004c86ac0b8ec509bd3758', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1394.444171] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1394.469508] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1394.469750] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1394.469909] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.470104] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1394.470254] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Image pref 0:0:0 {{(pid=61573) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.470401] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1394.470606] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1394.470765] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1394.470935] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1394.471166] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1394.471349] env[61573]: DEBUG nova.virt.hardware [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1394.472217] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f720041d-f9a1-4811-baa1-b16ec4511818 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.480840] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaebcedd-16d0-42f1-a885-cb47cc1b5904 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.780273] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Successfully created port: ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.991828] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "3e075864-6503-4d83-bbd4-f0bec8104e03" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.767923] env[61573]: DEBUG nova.compute.manager [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Received event network-vif-plugged-ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1395.768170] env[61573]: DEBUG oslo_concurrency.lockutils [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] Acquiring lock "3e075864-6503-4d83-bbd4-f0bec8104e03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.768383] env[61573]: DEBUG oslo_concurrency.lockutils [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.768550] env[61573]: DEBUG oslo_concurrency.lockutils [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.768751] env[61573]: DEBUG nova.compute.manager [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] No waiting events found dispatching network-vif-plugged-ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1395.768963] env[61573]: WARNING nova.compute.manager [req-3ed9ac26-1085-422a-9f8f-848495cc1620 req-5cab09c6-3ee2-46ef-b508-d6cbcc127fb5 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Received unexpected event network-vif-plugged-ce4da051-80e0-4af1-9f40-d9896cd176da for instance with vm_state building and task_state deleting. 
[ 1395.799655] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Successfully updated port: ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.811674] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.811829] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquired lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.811987] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1395.860437] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1396.039362] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updating instance_info_cache with network_info: [{"id": "ce4da051-80e0-4af1-9f40-d9896cd176da", "address": "fa:16:3e:db:96:d2", "network": {"id": "a77ef09e-456b-4994-af2e-096759934323", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-469625737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5700bfb489004c86ac0b8ec509bd3758", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4da051-80", "ovs_interfaceid": "ce4da051-80e0-4af1-9f40-d9896cd176da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.052700] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Releasing lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.053024] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance network_info: |[{"id": "ce4da051-80e0-4af1-9f40-d9896cd176da", "address": "fa:16:3e:db:96:d2", "network": {"id": "a77ef09e-456b-4994-af2e-096759934323", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-469625737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5700bfb489004c86ac0b8ec509bd3758", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4da051-80", "ovs_interfaceid": "ce4da051-80e0-4af1-9f40-d9896cd176da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1396.053432] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:96:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce4da051-80e0-4af1-9f40-d9896cd176da', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.061008] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Creating folder: Project (5700bfb489004c86ac0b8ec509bd3758). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1396.061621] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c62790c-9dcd-4372-8c81-e184ee09f5d9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.076424] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Created folder: Project (5700bfb489004c86ac0b8ec509bd3758) in parent group-v942801. [ 1396.076651] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Creating folder: Instances. Parent ref: group-v942884. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1396.076875] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8190807a-8c46-4821-89a4-b6712e4f3bcb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.087411] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Created folder: Instances in parent group-v942884. [ 1396.087665] env[61573]: DEBUG oslo.service.loopingcall [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.087932] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1396.088164] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8563d2db-49f9-4008-8014-2b21a3064f5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.108572] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.108572] env[61573]: value = "task-4836855" [ 1396.108572] env[61573]: _type = "Task" [ 1396.108572] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.117205] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836855, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.619643] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836855, 'name': CreateVM_Task, 'duration_secs': 0.291984} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.619773] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1396.621048] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.621048] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.621260] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1396.621396] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34941e64-d944-48b1-9ff4-e42ef9cd1c93 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.626346] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for the task: (returnval){ [ 1396.626346] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]528aa322-c459-0411-ad0f-7f32562c91bb" [ 1396.626346] env[61573]: _type = "Task" [ 
1396.626346] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.634523] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]528aa322-c459-0411-ad0f-7f32562c91bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.137340] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.137681] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.137864] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.792123] env[61573]: DEBUG nova.compute.manager [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Received event network-changed-ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1397.792330] env[61573]: DEBUG nova.compute.manager [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Refreshing instance network info cache due to event network-changed-ce4da051-80e0-4af1-9f40-d9896cd176da. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1397.792540] env[61573]: DEBUG oslo_concurrency.lockutils [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] Acquiring lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.792682] env[61573]: DEBUG oslo_concurrency.lockutils [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] Acquired lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.792843] env[61573]: DEBUG nova.network.neutron [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Refreshing network info cache for port ce4da051-80e0-4af1-9f40-d9896cd176da {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1398.056866] env[61573]: DEBUG nova.network.neutron [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updated VIF entry in instance network info cache for port ce4da051-80e0-4af1-9f40-d9896cd176da. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1398.057312] env[61573]: DEBUG nova.network.neutron [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updating instance_info_cache with network_info: [{"id": "ce4da051-80e0-4af1-9f40-d9896cd176da", "address": "fa:16:3e:db:96:d2", "network": {"id": "a77ef09e-456b-4994-af2e-096759934323", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-469625737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5700bfb489004c86ac0b8ec509bd3758", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4da051-80", "ovs_interfaceid": "ce4da051-80e0-4af1-9f40-d9896cd176da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.069192] env[61573]: DEBUG oslo_concurrency.lockutils [req-5796f243-988c-4f08-b5a7-aa9d2642f0b1 req-7316debc-c809-49fd-a005-94c0daa88339 service nova] Releasing lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.404805] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.405182] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 1398.418450] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 1400.403855] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1400.404200] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 1405.353082] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "c96755a9-1e1c-42ed-a170-35914ef05333" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.353426] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.416101] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.439122] env[61573]: DEBUG oslo_concurrency.lockutils [None req-46ace650-85a4-4527-82d5-06f1a1c1a4fc tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "bb3d67c0-0686-46cb-8670-6cf7267790f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.439354] env[61573]: DEBUG oslo_concurrency.lockutils [None req-46ace650-85a4-4527-82d5-06f1a1c1a4fc tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "bb3d67c0-0686-46cb-8670-6cf7267790f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.404334] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.404608] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1409.404641] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1409.428448] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428448] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428619] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428619] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428773] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428851] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.428974] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.429106] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.429234] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.429337] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1409.429489] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1409.430034] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.430293] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.430488] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.411969] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.411969] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.424501] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.424734] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.424902] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.425123] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1410.426362] 
env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718dfca7-3a4a-4676-b822-91848ba166eb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.435196] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3c8ac1-6241-4d6f-9411-8d7daaa584b3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.449959] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d953aa63-88ef-42c8-bd17-5bb3b2cfdf7c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.457049] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da136d1-fb11-4090-9241-083a344e0e07 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.487204] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180558MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1410.487357] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.487516] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.629931] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630111] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630243] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630361] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630476] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630594] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630709] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630824] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.630936] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.631061] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1410.647057] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 89d2c816-dcac-41fd-b760-d93348dbbebd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.658440] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.670859] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.682042] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.692627] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.703247] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance bb3d67c0-0686-46cb-8670-6cf7267790f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1410.703494] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1410.703657] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '63', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_4ebf7b3bebe84e8f8a46532726f8935b': '1', 'io_workload': '10', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1410.724126] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1410.739214] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1410.739453] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1410.750133] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1410.769655] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: 
COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1410.974822] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3c012b-78d2-4cd2-8d52-098617743efc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.983119] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c557ec5-6324-496b-9625-5eb0fc6091a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.013967] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9be34ee-d456-4f35-a93f-812c427ae9a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.022116] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e0d51a-7aff-4d4d-812d-d3049eb8e80e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.036363] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.045697] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1411.061304] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1411.061534] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.574s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.053532] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.053869] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.053986] env[61573]: DEBUG 
oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.054176] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1418.273119] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ed93f987-8820-4de0-8e6a-095f26c86fe2 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Acquiring lock "d50453ba-f4b2-4bd9-8545-c123f8f31878" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.273487] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ed93f987-8820-4de0-8e6a-095f26c86fe2 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "d50453ba-f4b2-4bd9-8545-c123f8f31878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.756329] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2012a9-f361-43fb-b999-6c174fee98db tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "f29eb708-7f1b-4d3c-b932-ca9a1b346b3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.756638] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2012a9-f361-43fb-b999-6c174fee98db tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "f29eb708-7f1b-4d3c-b932-ca9a1b346b3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.276046] env[61573]: WARNING oslo_vmware.rw_handles [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1441.276046] env[61573]: ERROR oslo_vmware.rw_handles [ 1441.276738] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1441.278345] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1441.278592] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Copying Virtual Disk [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/8de6ae31-6203-457d-b04e-843e08f972b5/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1441.278881] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcaeee82-a5bc-4165-a9fe-a8159fcb606e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.286632] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for the task: (returnval){ [ 1441.286632] env[61573]: value = "task-4836856" [ 1441.286632] env[61573]: _type = "Task" [ 1441.286632] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.295142] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Task: {'id': task-4836856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.797531] env[61573]: DEBUG oslo_vmware.exceptions [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1441.797775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.798381] env[61573]: ERROR nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1441.798381] env[61573]: Faults: ['InvalidArgument'] [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Traceback (most recent call last): [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] yield resources [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self.driver.spawn(context, instance, image_meta, [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self._fetch_image_if_missing(context, vi) [ 1441.798381] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] image_cache(vi, tmp_image_ds_loc) [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] vm_util.copy_virtual_disk( [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] session._wait_for_task(vmdk_copy_task) [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return self.wait_for_task(task_ref) [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return evt.wait() [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] result = hub.switch() [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1441.798745] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return self.greenlet.switch() [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self.f(*self.args, **self.kw) [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] raise exceptions.translate_fault(task_info.error) [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Faults: ['InvalidArgument'] [ 1441.799090] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] [ 1441.799090] env[61573]: INFO nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Terminating instance [ 1441.800275] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.800484] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.800725] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6367c42c-d808-4649-8808-b5273e734b3d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.803205] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1441.803403] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1441.804151] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e6470e-1b0a-411f-a00f-69d63190af1c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.811145] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1441.811428] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d70af80-513a-4879-a52e-daf816574cb9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.813735] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.813911] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1441.814871] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53088b03-5f0c-4346-9a09-ead67400351b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.820179] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for the task: (returnval){ [ 1441.820179] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524d4d43-fa36-1926-e71e-5fa15ad5e105" [ 1441.820179] env[61573]: _type = "Task" [ 1441.820179] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.831936] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524d4d43-fa36-1926-e71e-5fa15ad5e105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.879665] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1441.879871] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1441.880054] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Deleting the datastore file [datastore2] 8d624aa8-c52f-4d3b-bb7e-fac412249b97 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.880335] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa76aba4-c3fe-4632-b3ad-1b947e5f9d15 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.886600] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for the task: (returnval){ [ 1441.886600] env[61573]: value = "task-4836858" [ 1441.886600] env[61573]: _type = "Task" [ 1441.886600] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.894924] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Task: {'id': task-4836858, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.331066] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1442.331527] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Creating directory with path [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.331599] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2f603dd-52ff-4ff8-b85a-37ae4f205455 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.345605] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Created directory with path [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.345815] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Fetch image to [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1442.345998] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1442.346952] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82ca1e4-99e3-4070-9a96-ff8fba4f6fbe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.354258] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9f84db-8e08-400b-a604-5d299783e4d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.363368] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537b39f7-76fa-45f8-bb6d-8cb20fff1cd2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.398360] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d367f34a-40aa-4795-8d2c-b450b459b6b7 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.406450] env[61573]: DEBUG oslo_vmware.api [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Task: {'id': task-4836858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07388} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.407967] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.408187] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1442.408383] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1442.408562] env[61573]: INFO nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1442.410659] env[61573]: DEBUG nova.compute.claims [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1442.410830] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.411052] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.413531] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4c348231-5976-49e5-b17d-72700277262b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.436761] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1442.493780] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1442.554742] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1442.554742] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1442.763389] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d54ea4-ff65-4cf4-9999-924f50d69fa8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.772164] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7fdd4a-2779-4ab6-845c-92c08bbac469 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.805450] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edee3994-16bd-45fd-a64d-a94dc887bfcc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.813592] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e768f05-ca34-4e77-9195-7a03c8841ad0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.827163] env[61573]: DEBUG nova.compute.provider_tree [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.838913] env[61573]: DEBUG nova.scheduler.client.report [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.857619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.446s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.858368] env[61573]: ERROR nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1442.858368] env[61573]: Faults: ['InvalidArgument'] [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Traceback (most recent call last): [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self.driver.spawn(context, instance, image_meta, [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self._fetch_image_if_missing(context, vi) [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] image_cache(vi, tmp_image_ds_loc) [ 1442.858368] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] vm_util.copy_virtual_disk( [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] session._wait_for_task(vmdk_copy_task) [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return self.wait_for_task(task_ref) [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return evt.wait() [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] result = hub.switch() [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] return self.greenlet.switch() [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1442.858780] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] self.f(*self.args, **self.kw) [ 1442.859179] env[61573]: ERROR nova.compute.manager [instance: 
8d624aa8-c52f-4d3b-bb7e-fac412249b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1442.859179] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] raise exceptions.translate_fault(task_info.error) [ 1442.859179] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1442.859179] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Faults: ['InvalidArgument'] [ 1442.859179] env[61573]: ERROR nova.compute.manager [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] [ 1442.859179] env[61573]: DEBUG nova.compute.utils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1442.860743] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Build of instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 was re-scheduled: A specified parameter was not correct: fileType [ 1442.860743] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1442.861124] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1442.861310] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1442.861486] env[61573]: DEBUG nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1442.861651] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1443.181482] env[61573]: DEBUG nova.network.neutron [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.198475] env[61573]: INFO nova.compute.manager [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Took 0.34 seconds to deallocate network for instance. [ 1443.293780] env[61573]: INFO nova.scheduler.client.report [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Deleted allocations for instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 [ 1443.337484] env[61573]: DEBUG oslo_concurrency.lockutils [None req-25c150cd-b865-46ae-8725-19da5732a2eb tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 475.498s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.338775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 309.617s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.338976] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1443.339169] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.339830] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 279.304s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.340059] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Acquiring lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.340266] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.340433] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.342304] env[61573]: INFO nova.compute.manager [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Terminating instance [ 1443.344399] env[61573]: DEBUG nova.compute.manager [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1443.344591] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1443.344849] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-477cbeff-6874-4207-a64d-8572975c955c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.355413] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e11fe33-bfd0-4cc9-a2ca-865aaaa98d91 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.365704] env[61573]: DEBUG nova.compute.manager [None req-5c2617a4-817c-4058-a7a0-fc8f08eb8e1a tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: e045b957-1670-4277-a9e3-d4ce1fb24047] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1443.388255] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d624aa8-c52f-4d3b-bb7e-fac412249b97 could not be found. [ 1443.388476] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1443.388654] env[61573]: INFO nova.compute.manager [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1443.389036] env[61573]: DEBUG oslo.service.loopingcall [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1443.389128] env[61573]: DEBUG nova.compute.manager [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1443.389237] env[61573]: DEBUG nova.network.neutron [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1443.393575] env[61573]: DEBUG nova.compute.manager [None req-5c2617a4-817c-4058-a7a0-fc8f08eb8e1a tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] [instance: e045b957-1670-4277-a9e3-d4ce1fb24047] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1443.413368] env[61573]: DEBUG oslo_concurrency.lockutils [None req-5c2617a4-817c-4058-a7a0-fc8f08eb8e1a tempest-ImagesTestJSON-628715491 tempest-ImagesTestJSON-628715491-project-member] Lock "e045b957-1670-4277-a9e3-d4ce1fb24047" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.331s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.415363] env[61573]: DEBUG nova.network.neutron [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.423315] env[61573]: INFO nova.compute.manager [-] [instance: 8d624aa8-c52f-4d3b-bb7e-fac412249b97] Took 0.03 seconds to deallocate network for instance. [ 1443.423612] env[61573]: DEBUG nova.compute.manager [None req-712d33dc-ffb4-43c2-bca0-e8abbf75ab3f tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 89d2c816-dcac-41fd-b760-d93348dbbebd] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1443.448173] env[61573]: DEBUG nova.compute.manager [None req-712d33dc-ffb4-43c2-bca0-e8abbf75ab3f tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: 89d2c816-dcac-41fd-b760-d93348dbbebd] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1443.470668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-712d33dc-ffb4-43c2-bca0-e8abbf75ab3f tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "89d2c816-dcac-41fd-b760-d93348dbbebd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.766s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.480105] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1443.518876] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba3e705a-5deb-43c6-a294-b5c5ab6f360d tempest-FloatingIPsAssociationTestJSON-863464870 tempest-FloatingIPsAssociationTestJSON-863464870-project-member] Lock "8d624aa8-c52f-4d3b-bb7e-fac412249b97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.534400] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.534653] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.536061] env[61573]: INFO nova.compute.claims [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1443.808080] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9bd1f2-7cf3-42d9-85ba-0a342de4755c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.816473] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69d6744-c8f9-4e20-89e7-0e57b4bd4aff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.847447] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b72326-23fd-4bce-86fa-374abc82fb30 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.855426] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e69a65-e404-409d-ad25-4c8ac389a361 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.869263] env[61573]: DEBUG nova.compute.provider_tree [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1443.878878] env[61573]: DEBUG nova.scheduler.client.report [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Inventory has not changed for provider 
b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1443.894741] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.895252] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1443.930134] env[61573]: DEBUG nova.compute.utils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.931618] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1443.931792] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1443.943997] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1443.997485] env[61573]: DEBUG nova.policy [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6d9cc0e73742b6b0081bfda15755a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c143aa5f5238459388ccd140702680ab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1444.018405] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1444.044593] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.044872] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.045051] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.045238] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.045385] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Image pref 0:0:0 
{{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.045530] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.045736] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.045891] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.046062] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.046230] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.046400] env[61573]: DEBUG nova.virt.hardware [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.047349] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216ae629-1c11-457a-8ed6-5bcb9b2750d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.055272] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddbd03d-77ea-40de-8ae3-d848a6e4b92e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.318422] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Successfully created port: ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1445.028136] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: 
d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Successfully updated port: ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1445.041973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.041973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquired lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.041973] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1445.113490] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1445.223195] env[61573]: DEBUG nova.compute.manager [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Received event network-vif-plugged-ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1445.223422] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Acquiring lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.223633] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.223800] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.223969] env[61573]: DEBUG nova.compute.manager [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] No waiting events 
found dispatching network-vif-plugged-ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1445.224603] env[61573]: WARNING nova.compute.manager [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Received unexpected event network-vif-plugged-ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 for instance with vm_state building and task_state spawning. [ 1445.224798] env[61573]: DEBUG nova.compute.manager [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Received event network-changed-ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1445.224957] env[61573]: DEBUG nova.compute.manager [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Refreshing instance network info cache due to event network-changed-ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1445.225179] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Acquiring lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.330908] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Updating instance_info_cache with network_info: [{"id": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "address": "fa:16:3e:06:f8:33", "network": {"id": "9a3e7d51-642a-4bb9-a49f-686d267b172b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-65736915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c143aa5f5238459388ccd140702680ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cb4ce-37", "ovs_interfaceid": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.345259] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Releasing lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.345551] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance network_info: |[{"id": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "address": "fa:16:3e:06:f8:33", "network": {"id": "9a3e7d51-642a-4bb9-a49f-686d267b172b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-65736915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c143aa5f5238459388ccd140702680ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cb4ce-37", "ovs_interfaceid": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1445.345850] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Acquired lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.346036] env[61573]: DEBUG nova.network.neutron [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Refreshing network info cache for port ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1445.347201] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:f8:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.354538] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Creating folder: Project (c143aa5f5238459388ccd140702680ab). Parent ref: group-v942801. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1445.358154] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5be04b8f-0b97-4fec-aea9-14db34638f44 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.369840] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Created folder: Project (c143aa5f5238459388ccd140702680ab) in parent group-v942801. [ 1445.370056] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Creating folder: Instances. Parent ref: group-v942887. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1445.370292] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ad89f1d-5120-41c8-9bf5-da1c07a6901e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.378783] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Created folder: Instances in parent group-v942887. [ 1445.379013] env[61573]: DEBUG oslo.service.loopingcall [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.379294] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1445.379446] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da48704e-f6dc-4e18-be50-54464ba27c8e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.400672] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.400672] env[61573]: value = "task-4836861" [ 1445.400672] env[61573]: _type = "Task" [ 1445.400672] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.408348] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836861, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.619425] env[61573]: DEBUG nova.network.neutron [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Updated VIF entry in instance network info cache for port ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1445.619802] env[61573]: DEBUG nova.network.neutron [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Updating instance_info_cache with network_info: [{"id": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "address": "fa:16:3e:06:f8:33", "network": {"id": "9a3e7d51-642a-4bb9-a49f-686d267b172b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-65736915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c143aa5f5238459388ccd140702680ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cb4ce-37", "ovs_interfaceid": "ef6cb4ce-376f-40d4-9ce3-cddcc727a0c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.630670] env[61573]: DEBUG oslo_concurrency.lockutils [req-16f328dd-6ad6-48c3-a510-17cfea82594a req-550b21e1-1acd-445f-9909-76933ac8a85d service nova] Releasing lock "refresh_cache-d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.911317] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836861, 'name': CreateVM_Task, 'duration_secs': 0.298305} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.911497] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1445.912172] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.912341] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.912663] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.912918] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92044dfe-ba81-4e87-a9e9-a80fe2937f2c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.917666] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for the task: (returnval){ [ 1445.917666] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]520331a7-148c-33a5-8d09-90cac5b6b094" [ 1445.917666] env[61573]: _type = "Task" [ 1445.917666] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.927508] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]520331a7-148c-33a5-8d09-90cac5b6b094, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.428291] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.428648] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.428765] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.400533] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.405740] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.404785] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.404966] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1469.405066] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1469.425375] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.425704] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.425704] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.425819] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.425905] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426040] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426164] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426284] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426433] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426568] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1469.426693] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1469.427189] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.404095] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.404384] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.403950] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.416128] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.416397] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.416594] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.416754] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1471.417949] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5284b406-d286-4af7-aeae-8a0f51f0cbf9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.427449] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10807438-a0b1-4612-a9e8-6985fd03d951 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.442107] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b299c4-75fb-42b2-9ba1-27580def75a2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.449405] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf9e4cb-b82e-4a4b-b099-8a227cbdab19 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.483339] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1471.483505] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.483689] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.564836] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565055] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565227] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565436] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565544] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565677] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565826] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.565964] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.566134] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.566273] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1471.578194] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.591217] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.603357] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.616818] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance bb3d67c0-0686-46cb-8670-6cf7267790f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.628254] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d50453ba-f4b2-4bd9-8545-c123f8f31878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.641075] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f29eb708-7f1b-4d3c-b932-ca9a1b346b3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1471.641338] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1471.641498] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '66', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_b194ec23978b411dbedfcb7095dbb743': '1', 'io_workload': '10', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_task_spawning': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1471.848674] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977c47ed-aa42-4900-8f55-b53de18b3e9a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.856677] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083eeef0-21cd-4053-8b38-af2a3614d1cd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.887296] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7209a12d-69a7-4ef2-b011-fb12fc9a6a18 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.895401] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66c0d78-4477-43b0-ab70-fbfb4c28d9cb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.911978] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.920993] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1471.935784] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1471.935784] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.452s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.936251] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.404046] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.404307] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.404458] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1491.754976] env[61573]: WARNING oslo_vmware.rw_handles [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1491.754976] env[61573]: ERROR oslo_vmware.rw_handles [ 1491.755592] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1491.757368] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1491.757657] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Copying Virtual Disk [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/1aa2371d-93a5-4074-abb1-e5a5ef94269a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1491.757931] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-206f8a0a-0f70-425c-9c19-cc85f84e9a96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.766534] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for the task: 
(returnval){ [ 1491.766534] env[61573]: value = "task-4836862" [ 1491.766534] env[61573]: _type = "Task" [ 1491.766534] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.775019] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Task: {'id': task-4836862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.277613] env[61573]: DEBUG oslo_vmware.exceptions [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1492.277901] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.278477] env[61573]: ERROR nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1492.278477] env[61573]: Faults: ['InvalidArgument'] [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Traceback (most recent call last): [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] yield resources [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self.driver.spawn(context, instance, image_meta, [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self._fetch_image_if_missing(context, vi) [ 1492.278477] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 
8619fddd-ad89-42b3-95c5-55def25b6df2] image_cache(vi, tmp_image_ds_loc) [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] vm_util.copy_virtual_disk( [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] session._wait_for_task(vmdk_copy_task) [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return self.wait_for_task(task_ref) [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return evt.wait() [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] result = hub.switch() [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1492.278890] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return self.greenlet.switch() [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self.f(*self.args, **self.kw) [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] raise exceptions.translate_fault(task_info.error) [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Faults: ['InvalidArgument'] [ 1492.279307] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] [ 1492.279307] env[61573]: INFO nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Terminating instance [ 1492.280414] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 
tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.280616] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.280859] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d922b78e-a2df-4a3c-9296-21442822d202 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.283101] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1492.283296] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1492.284052] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdd8e83-88ce-43e5-adc5-0beb52ea3a96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.291366] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1492.291603] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd1ef866-1e4f-4d9f-90e7-50c479bbf7ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.293935] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.294117] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1492.295098] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aca64dcf-9b2c-4bec-8361-bc6355a6d91a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.300228] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for the task: (returnval){ [ 1492.300228] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52bdb81d-8f9a-87e7-0b93-dea1c28a6fea" [ 1492.300228] env[61573]: _type = "Task" [ 1492.300228] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.307905] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52bdb81d-8f9a-87e7-0b93-dea1c28a6fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.362302] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1492.362544] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1492.362693] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Deleting the datastore file [datastore2] 8619fddd-ad89-42b3-95c5-55def25b6df2 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1492.362973] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc17b8ba-1aa4-4c51-9671-3c0c9587e1c3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.370067] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for the task: (returnval){ [ 1492.370067] env[61573]: value = "task-4836864" [ 1492.370067] env[61573]: _type = "Task" [ 1492.370067] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.378152] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Task: {'id': task-4836864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.811689] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1492.812018] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Creating directory with path [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.812966] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7167f819-d526-4dd4-b3de-fe96632b3802 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.824456] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Created directory with path [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.824698] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Fetch image to [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1492.824901] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1492.825683] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98181160-ccd9-44d3-a020-0a35db246405 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.832756] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90d34b8-c2ef-4d6c-a56f-6c090125974b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.841910] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568c5cdb-e48d-4b01-bc01-be5da4060e95 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.876292] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d4c543-6d94-4aec-b6c9-5a8ba1ed9468 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.883743] env[61573]: DEBUG oslo_vmware.api [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Task: {'id': task-4836864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083456} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.885215] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1492.885413] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1492.885582] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1492.885754] env[61573]: INFO nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Took 0.60 seconds to destroy the instance on the hypervisor. 
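The records above show the shape of the failure: CopyVirtualDisk_Task is invoked, polled ("progress is 0%"), and then surfaces VimFaultException: A specified parameter was not correct: fileType (InvalidArgument), after which the instance is destroyed and its datastore files deleted. The snippet below is a minimal, self-contained sketch of that poll-and-translate pattern only; it is not Nova or oslo.vmware code. The get_task_info callback and the VimFault class are hypothetical stand-ins for the vSphere SOAP task query and oslo_vmware.exceptions.VimFaultException.

# Sketch of the wait_for_task/_poll_task behaviour seen in the log: poll the
# task until vSphere reports success or error, and raise the task error
# (e.g. InvalidArgument) back to the caller.
import time


class VimFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list


def wait_for_task(get_task_info, task_ref, interval=0.5):
    while True:
        info = get_task_info(task_ref)          # hypothetical SOAP read
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise VimFault(info["error"]["message"], info["error"]["faults"])
        time.sleep(interval)


def example():
    # Simulated task states mirroring the log: one "running" poll, then the fault.
    states = iter([
        {"state": "running"},
        {"state": "error",
         "error": {"message": "A specified parameter was not correct: fileType",
                   "faults": ["InvalidArgument"]}},
    ])
    try:
        wait_for_task(lambda ref: next(states), "task-4836862", interval=0)
    except VimFault as exc:
        print("task failed:", exc, exc.fault_list)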
[ 1492.887614] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-df21200d-8334-4cc0-94fb-810c2be5c8f2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.889565] env[61573]: DEBUG nova.compute.claims [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1492.889744] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.889954] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.913325] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1492.966121] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1493.025054] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1493.025180] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1493.192396] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1da167a-52e4-46dd-b2a4-731640cb6526 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.200402] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc496c1-1c85-433e-a432-1273a03a4b29 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.229333] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3028bf1c-57a5-4f02-8430-7ffc273c5981 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.236652] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc1f182-abab-4cb1-a89a-e0d1cbda739c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.250918] env[61573]: DEBUG nova.compute.provider_tree [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.259713] env[61573]: DEBUG nova.scheduler.client.report [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.273847] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.384s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.274469] env[61573]: ERROR nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1493.274469] env[61573]: Faults: ['InvalidArgument'] [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Traceback (most recent call last): [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1493.274469] env[61573]: ERROR 
nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self.driver.spawn(context, instance, image_meta, [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self._fetch_image_if_missing(context, vi) [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] image_cache(vi, tmp_image_ds_loc) [ 1493.274469] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] vm_util.copy_virtual_disk( [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] session._wait_for_task(vmdk_copy_task) [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return self.wait_for_task(task_ref) [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return evt.wait() [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] result = hub.switch() [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] return self.greenlet.switch() [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1493.274867] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] self.f(*self.args, **self.kw) [ 1493.275338] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1493.275338] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] raise exceptions.translate_fault(task_info.error) [ 1493.275338] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1493.275338] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Faults: ['InvalidArgument'] [ 1493.275338] env[61573]: ERROR nova.compute.manager [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] [ 1493.275338] env[61573]: DEBUG nova.compute.utils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1493.276594] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Build of instance 8619fddd-ad89-42b3-95c5-55def25b6df2 was re-scheduled: A specified parameter was not correct: fileType [ 1493.276594] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1493.276981] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1493.277173] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1493.277341] env[61573]: DEBUG nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1493.277501] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1493.656698] env[61573]: DEBUG nova.network.neutron [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.667725] env[61573]: INFO nova.compute.manager [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Took 0.39 seconds to deallocate network for instance. [ 1493.784362] env[61573]: INFO nova.scheduler.client.report [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Deleted allocations for instance 8619fddd-ad89-42b3-95c5-55def25b6df2 [ 1493.813022] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d66266be-14b0-48ab-a18f-37815cfd38ad tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 524.425s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.814483] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 360.093s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.814705] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] During sync_power_state the instance has a pending task (spawning). Skip. 
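The lockutils lines above record the serialization at work here: _locked_do_build_and_run_instance held the lock named after the instance UUID for 524.425s, so the power-state sync waited 360.093s on the same name and then skipped the instance because a spawn task was still pending. Below is a rough sketch of that per-instance locking, assuming only the documented oslo_concurrency.lockutils.lock() context manager; the function itself is hypothetical and not part of Nova.

# Illustrative only: serializing work on one instance by locking on its UUID,
# the same pattern the "acquired ... waited" / "released ... held" lines show.
from oslo_concurrency import lockutils


def query_power_state_and_sync(instance_uuid):
    # Blocks until any other operation (such as the long-running spawn above)
    # releases the lock of the same name, then runs the sync.
    with lockutils.lock(instance_uuid):
        print("syncing power state for %s" % instance_uuid)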
[ 1493.814890] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.815804] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 328.175s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.816418] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Acquiring lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.816418] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.816418] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.818443] env[61573]: INFO nova.compute.manager [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Terminating instance [ 1493.820649] env[61573]: DEBUG nova.compute.manager [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1493.820845] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1493.821146] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd4cb1dc-ea49-468f-b725-ef0bc811e937 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.833633] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2905219e-876f-495d-8276-35f60e961645 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.843348] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1493.867946] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8619fddd-ad89-42b3-95c5-55def25b6df2 could not be found. [ 1493.868182] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1493.868362] env[61573]: INFO nova.compute.manager [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1493.868607] env[61573]: DEBUG oslo.service.loopingcall [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.868901] env[61573]: DEBUG nova.compute.manager [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1493.869010] env[61573]: DEBUG nova.network.neutron [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1493.890907] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.891178] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.892842] env[61573]: INFO nova.compute.claims [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1493.904785] env[61573]: DEBUG nova.network.neutron [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.934911] env[61573]: INFO nova.compute.manager [-] [instance: 8619fddd-ad89-42b3-95c5-55def25b6df2] Took 0.07 seconds to deallocate network for instance. 
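Both the claim abort for 8619fddd-... and the new claim for adb2282f-... log "Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba" together with the full inventory dict. The sketch below reuses that dict verbatim to illustrate the comparison that lets the report client skip the placement update when nothing differs; it is an illustration only, not the nova.scheduler.client.report implementation.

# Inventory data copied from the log lines above.
LOCAL_INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 96,
                "step_size": 1, "allocation_ratio": 1.0},
}


def inventory_changed(cached, new):
    # Placement only needs a PUT when something actually differs.
    return cached != new


if not inventory_changed(LOCAL_INVENTORY, dict(LOCAL_INVENTORY)):
    print("Inventory has not changed for provider; skipping update to placement")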
[ 1494.040071] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf911235-dba9-4b77-9e01-c519e2e6f3c3 tempest-ImagesNegativeTestJSON-927742358 tempest-ImagesNegativeTestJSON-927742358-project-member] Lock "8619fddd-ad89-42b3-95c5-55def25b6df2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.155062] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef19f28-cd2c-4ea0-8fdd-f6b5e54e74c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.163750] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea16e629-f5da-4f47-8467-cd39103aa24d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.194967] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c6b034-3bca-4ecd-95de-0786d9f68374 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.202880] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3e14b7-1e6f-4c35-88cd-4b0b10ab9fad {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.216229] env[61573]: DEBUG nova.compute.provider_tree [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.225169] env[61573]: DEBUG nova.scheduler.client.report [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1494.239834] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.240342] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1494.271530] env[61573]: DEBUG nova.compute.utils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1494.273199] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1494.273413] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1494.282544] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1494.338962] env[61573]: DEBUG nova.policy [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81607c4c942e464f82073635c8691d54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0341bbb698194bf6a4cbca166a5dfffe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1494.344834] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1494.370268] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1494.370520] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1494.370675] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1494.370865] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1494.371017] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1494.371211] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1494.371486] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1494.371584] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1494.371748] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 
tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1494.371914] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1494.372168] env[61573]: DEBUG nova.virt.hardware [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1494.373040] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87598bba-5ef9-43ee-88a0-cf058ed63a71 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.381434] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58235b93-8093-4d20-b538-0026163d8d1f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.757274] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully created port: da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.140586] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully created port: e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.638990] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully created port: c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1496.303376] env[61573]: DEBUG nova.compute.manager [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-vif-plugged-da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1496.303648] env[61573]: DEBUG oslo_concurrency.lockutils [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.303842] env[61573]: DEBUG oslo_concurrency.lockutils [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] Lock 
"adb2282f-224e-4a56-abd8-cd91bd0023f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.304032] env[61573]: DEBUG oslo_concurrency.lockutils [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.304203] env[61573]: DEBUG nova.compute.manager [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] No waiting events found dispatching network-vif-plugged-da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1496.304368] env[61573]: WARNING nova.compute.manager [req-d75aeefa-c0a4-4249-bee4-7c1357ebcf7f req-07f35ade-c33e-48bb-852a-06e489add5df service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received unexpected event network-vif-plugged-da2853f1-0b16-4285-8cc9-82a5dea0e91a for instance with vm_state building and task_state spawning. [ 1496.389556] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully updated port: da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1497.206249] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully updated port: e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1498.072454] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Successfully updated port: c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1498.088354] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.088354] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.091014] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Building network info cache for instance 
{{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1498.162373] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1498.342561] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-changed-da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1498.342938] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing instance network info cache due to event network-changed-da2853f1-0b16-4285-8cc9-82a5dea0e91a. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1498.342938] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquiring lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.970784] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [{"id": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "address": "fa:16:3e:b1:07:8e", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2853f1-0b", "ovs_interfaceid": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "address": "fa:16:3e:59:03:72", "network": {"id": "9f796a0f-031a-44b0-93e2-6ec554a72896", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1937936146", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": 
false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape265db33-9d", "ovs_interfaceid": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "address": "fa:16:3e:f5:20:c2", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc420b840-39", "ovs_interfaceid": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.985606] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.985961] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance network_info: |[{"id": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "address": "fa:16:3e:b1:07:8e", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2853f1-0b", "ovs_interfaceid": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "address": "fa:16:3e:59:03:72", "network": {"id": "9f796a0f-031a-44b0-93e2-6ec554a72896", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1937936146", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape265db33-9d", "ovs_interfaceid": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "address": "fa:16:3e:f5:20:c2", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc420b840-39", "ovs_interfaceid": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1498.986296] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquired lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.986472] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing network info cache for port da2853f1-0b16-4285-8cc9-82a5dea0e91a {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1498.987607] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:b1:07:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4e02e98f-44ce-42b7-a3ac-4034fae5d127', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da2853f1-0b16-4285-8cc9-82a5dea0e91a', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:03:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e265db33-9d8b-4507-8bb6-c2c2a0f8de64', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:20:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4e02e98f-44ce-42b7-a3ac-4034fae5d127', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c420b840-3945-47fa-9d15-bc5c2d0edc0e', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.998720] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating folder: Project (0341bbb698194bf6a4cbca166a5dfffe). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1499.001765] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad4c1291-c6be-4eb8-9022-04508f5a465f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.013730] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created folder: Project (0341bbb698194bf6a4cbca166a5dfffe) in parent group-v942801. [ 1499.013938] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating folder: Instances. Parent ref: group-v942890. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1499.014205] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e93862f-16eb-4254-bd73-9f6fe731358d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.024448] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created folder: Instances in parent group-v942890. [ 1499.024734] env[61573]: DEBUG oslo.service.loopingcall [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.024931] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1499.025166] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e5c6554-10b3-4a54-b35d-176812211d86 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.052333] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.052333] env[61573]: value = "task-4836867" [ 1499.052333] env[61573]: _type = "Task" [ 1499.052333] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.060401] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836867, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.286940] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updated VIF entry in instance network info cache for port da2853f1-0b16-4285-8cc9-82a5dea0e91a. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1499.287432] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [{"id": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "address": "fa:16:3e:b1:07:8e", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2853f1-0b", "ovs_interfaceid": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "address": "fa:16:3e:59:03:72", "network": {"id": "9f796a0f-031a-44b0-93e2-6ec554a72896", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1937936146", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape265db33-9d", "ovs_interfaceid": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "address": "fa:16:3e:f5:20:c2", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc420b840-39", "ovs_interfaceid": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.299438] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Releasing lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.299804] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-vif-plugged-e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1499.300068] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.300331] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.300537] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.300736] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] No waiting events found dispatching network-vif-plugged-e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1499.301088] env[61573]: WARNING nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received unexpected event network-vif-plugged-e265db33-9d8b-4507-8bb6-c2c2a0f8de64 for instance with vm_state building and task_state spawning. [ 1499.301292] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-changed-e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1499.301457] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing instance network info cache due to event network-changed-e265db33-9d8b-4507-8bb6-c2c2a0f8de64. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1499.301694] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquiring lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.301862] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquired lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.302086] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing network info cache for port e265db33-9d8b-4507-8bb6-c2c2a0f8de64 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1499.563670] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836867, 'name': CreateVM_Task, 'duration_secs': 0.367035} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.563828] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1499.564765] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.565323] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.565323] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1499.565509] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c91dacb-50e3-4820-aefd-50fa9a5beea7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.570227] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 1499.570227] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c15cfd-12db-b05f-bd8f-5061617e98fb" [ 1499.570227] env[61573]: _type = "Task" [ 1499.570227] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.578279] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c15cfd-12db-b05f-bd8f-5061617e98fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.596382] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updated VIF entry in instance network info cache for port e265db33-9d8b-4507-8bb6-c2c2a0f8de64. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1499.596853] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [{"id": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "address": "fa:16:3e:b1:07:8e", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2853f1-0b", "ovs_interfaceid": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "address": "fa:16:3e:59:03:72", "network": {"id": "9f796a0f-031a-44b0-93e2-6ec554a72896", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1937936146", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape265db33-9d", "ovs_interfaceid": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "address": "fa:16:3e:f5:20:c2", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", 
"segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc420b840-39", "ovs_interfaceid": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.606659] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Releasing lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.606924] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-vif-plugged-c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1499.607140] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.607344] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.607506] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.607678] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] No waiting events found dispatching network-vif-plugged-c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1499.607848] env[61573]: WARNING nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received unexpected event network-vif-plugged-c420b840-3945-47fa-9d15-bc5c2d0edc0e for instance with vm_state building and task_state spawning. 
[ 1499.608014] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Received event network-changed-c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1499.608185] env[61573]: DEBUG nova.compute.manager [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing instance network info cache due to event network-changed-c420b840-3945-47fa-9d15-bc5c2d0edc0e. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1499.608367] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquiring lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.608502] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Acquired lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.608659] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Refreshing network info cache for port c420b840-3945-47fa-9d15-bc5c2d0edc0e {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1499.884739] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updated VIF entry in instance network info cache for port c420b840-3945-47fa-9d15-bc5c2d0edc0e. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1499.885225] env[61573]: DEBUG nova.network.neutron [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [{"id": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "address": "fa:16:3e:b1:07:8e", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2853f1-0b", "ovs_interfaceid": "da2853f1-0b16-4285-8cc9-82a5dea0e91a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "address": "fa:16:3e:59:03:72", "network": {"id": "9f796a0f-031a-44b0-93e2-6ec554a72896", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1937936146", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape265db33-9d", "ovs_interfaceid": "e265db33-9d8b-4507-8bb6-c2c2a0f8de64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "address": "fa:16:3e:f5:20:c2", "network": {"id": "6ed84aa6-fdcd-4f7a-b8ac-d62ef1153a1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1148246817", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", 
"segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc420b840-39", "ovs_interfaceid": "c420b840-3945-47fa-9d15-bc5c2d0edc0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.895307] env[61573]: DEBUG oslo_concurrency.lockutils [req-5e190001-1717-44f5-8dd4-1d8aab940cf3 req-fb6b76c8-beaa-4b3e-a9ad-32c902a46b86 service nova] Releasing lock "refresh_cache-adb2282f-224e-4a56-abd8-cd91bd0023f0" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.081622] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.081880] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1500.082109] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.404348] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.404682] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1529.404682] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1529.430634] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.430795] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.430913] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431201] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431415] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431513] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431632] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431747] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431864] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.431978] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1529.432106] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1529.432614] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.405093] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.399332] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.404296] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.404541] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.416924] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.417260] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.417357] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.417513] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1532.418692] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7730ad82-96b9-411c-a7c8-b4c5d952f5c5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.427799] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab67291-60be-4d4f-9fb9-c7a75ded1bdf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.443061] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20698671-a0b5-4f8f-83d4-97d3279ba5b5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.449874] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ada0ec-742b-420a-a5af-8e3cafd9785d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.479756] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180531MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1532.479899] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.480120] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.554972] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance b1392e68-7dc9-4399-88a6-9463e06917b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555148] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555277] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555396] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555512] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555628] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555741] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555853] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.555966] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.556089] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1532.568569] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1532.579306] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1532.589336] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance bb3d67c0-0686-46cb-8670-6cf7267790f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1532.599723] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d50453ba-f4b2-4bd9-8545-c123f8f31878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1532.610192] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f29eb708-7f1b-4d3c-b932-ca9a1b346b3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1532.610475] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1532.610670] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '67', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_38a7a73ba2f74211829146bc750ec0aa': '1', 'io_workload': '10', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_task_spawning': '2', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1532.794640] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa669db-4b06-48a9-a953-67004d97928e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.802401] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16080775-637e-4eb6-90eb-27c486b88677 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.831726] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56be6d36-1eb5-4a62-b403-ba0798c52b96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.839706] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b9f598-8afa-4b01-896a-2d091eafcf3b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.853935] env[61573]: DEBUG 
nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.862593] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1532.877187] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1532.877406] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.397s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.876820] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.877252] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.877252] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1535.404336] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1541.775858] env[61573]: WARNING oslo_vmware.rw_handles [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1541.775858] env[61573]: ERROR oslo_vmware.rw_handles [ 1541.775858] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1541.777964] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1541.778238] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Copying Virtual Disk [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/d73b84bb-cabe-4146-9fe9-74284c46fab9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1541.778537] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-514c90c8-48d4-44d4-bca6-bdbe2b1aace2 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.787515] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for the task: (returnval){ [ 1541.787515] env[61573]: value = "task-4836868" [ 1541.787515] env[61573]: _type = "Task" [ 1541.787515] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.796090] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Task: {'id': task-4836868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.299034] env[61573]: DEBUG oslo_vmware.exceptions [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1542.299034] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.299249] env[61573]: ERROR nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1542.299249] env[61573]: Faults: ['InvalidArgument'] [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Traceback (most recent call last): [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] yield resources [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self.driver.spawn(context, instance, image_meta, [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] 
self._fetch_image_if_missing(context, vi) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] image_cache(vi, tmp_image_ds_loc) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] vm_util.copy_virtual_disk( [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] session._wait_for_task(vmdk_copy_task) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return self.wait_for_task(task_ref) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return evt.wait() [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] result = hub.switch() [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return self.greenlet.switch() [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self.f(*self.args, **self.kw) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] raise exceptions.translate_fault(task_info.error) [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Faults: ['InvalidArgument'] [ 1542.299249] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] [ 1542.300073] env[61573]: INFO nova.compute.manager [None 
req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Terminating instance [ 1542.300896] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.301114] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.301346] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d125316-6854-4d04-b49e-cec4b145378c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.303641] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1542.303834] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1542.304555] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027191fb-db60-44a0-b4ee-780c31c53dac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.311519] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1542.311755] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a788bd97-e6d1-46f5-aaa8-9c3ebf4986a9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.313846] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.314017] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1542.314947] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ce45975-860e-4163-9454-fbff598c519a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.320885] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for the task: (returnval){ [ 1542.320885] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5271a168-a069-8deb-89f7-ae3c384f3e7a" [ 1542.320885] env[61573]: _type = "Task" [ 1542.320885] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.328039] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5271a168-a069-8deb-89f7-ae3c384f3e7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.387395] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1542.387713] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1542.387876] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Deleting the datastore file [datastore2] b1392e68-7dc9-4399-88a6-9463e06917b4 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1542.388105] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d75f7358-4f74-4802-90f3-ca9d44c39e7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.394625] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for the task: (returnval){ [ 1542.394625] env[61573]: value = "task-4836870" [ 1542.394625] env[61573]: _type = "Task" [ 1542.394625] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.403733] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Task: {'id': task-4836870, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.831678] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1542.832048] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Creating directory with path [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.832192] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72ca63c8-8d15-4abd-be2c-74d94913d7e8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.844684] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Created directory with path [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.844904] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Fetch image to [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1542.845088] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1542.845872] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25e7873-56f8-40ec-bb62-f615e8edf918 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.853525] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b908da-0816-43ea-84a9-d94c5501827a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.862883] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4fb98f-779d-4d66-a995-addf94fa2ea6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.894562] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bd8a67-d75e-42c8-a789-c8d74003dc1c {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.905383] env[61573]: DEBUG oslo_vmware.api [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Task: {'id': task-4836870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067694} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.906903] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1542.907120] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1542.907297] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1542.907474] env[61573]: INFO nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Took 0.60 seconds to destroy the instance on the hypervisor. 
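The entries above trace the oslo.vmware task flow: a CopyVirtualDisk_Task (and later a DeleteDatastoreFile_Task) is created, polled while it reports progress, and a task error is translated into a fault and raised, which is how the InvalidArgument/fileType failure surfaces in the spawn traceback. A minimal sketch of that poll-and-translate pattern, using hypothetical names rather than the real oslo.vmware API:

# Illustrative sketch of the poll-until-done pattern visible in the log
# (wait_for_task -> _poll_task -> translate fault). Names are hypothetical.
import time

class TaskFault(Exception):
    """Raised when the backend reports a task error (e.g. InvalidArgument)."""

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out.

    get_task_info() is assumed to return an object with .state, .progress,
    .result and .error, mirroring the fields the log reports.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            # analogous to raise exceptions.translate_fault(task_info.error)
            raise TaskFault(info.error)
        # analogous to "Task: {'id': task-4836868, ...} progress is 0%."
        print(f"task progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")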
[ 1542.909394] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b698d710-5db7-4447-8b8f-7531942c4cd6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.911333] env[61573]: DEBUG nova.compute.claims [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1542.911537] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.911713] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.940247] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1543.062373] env[61573]: DEBUG oslo_vmware.rw_handles [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1543.121529] env[61573]: DEBUG oslo_vmware.rw_handles [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1543.121722] env[61573]: DEBUG oslo_vmware.rw_handles [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1543.206081] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744d93df-3b13-446d-af91-bfafda025464 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.214649] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19eced4f-3119-4c3e-953a-af9121f9fb1b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.246046] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f8a2c6-6bc3-4637-ab2b-74768470649b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.253975] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232e585b-32ff-4704-ae76-d982b0922a39 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.269534] env[61573]: DEBUG nova.compute.provider_tree [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.278490] env[61573]: DEBUG nova.scheduler.client.report [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1543.292866] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.381s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.293436] env[61573]: ERROR nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1543.293436] env[61573]: Faults: ['InvalidArgument'] [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Traceback (most recent call last): [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1543.293436] env[61573]: ERROR 
nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self.driver.spawn(context, instance, image_meta, [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self._fetch_image_if_missing(context, vi) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] image_cache(vi, tmp_image_ds_loc) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] vm_util.copy_virtual_disk( [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] session._wait_for_task(vmdk_copy_task) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return self.wait_for_task(task_ref) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return evt.wait() [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] result = hub.switch() [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] return self.greenlet.switch() [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] self.f(*self.args, **self.kw) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] raise exceptions.translate_fault(task_info.error) [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Faults: ['InvalidArgument'] [ 1543.293436] env[61573]: ERROR nova.compute.manager [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] [ 1543.294287] env[61573]: DEBUG nova.compute.utils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1543.295884] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Build of instance b1392e68-7dc9-4399-88a6-9463e06917b4 was re-scheduled: A specified parameter was not correct: fileType [ 1543.295884] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1543.296278] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1543.296453] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1543.296621] env[61573]: DEBUG nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1543.296782] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1543.698779] env[61573]: DEBUG nova.network.neutron [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.714979] env[61573]: INFO nova.compute.manager [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Took 0.42 seconds to deallocate network for instance. [ 1543.856545] env[61573]: INFO nova.scheduler.client.report [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Deleted allocations for instance b1392e68-7dc9-4399-88a6-9463e06917b4 [ 1543.881028] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ff2839d6-4bfb-4659-b40b-7c95d61a9be3 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.044s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.882346] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 371.703s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.882886] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Acquiring lock "b1392e68-7dc9-4399-88a6-9463e06917b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.883047] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.883224] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.886128] env[61573]: INFO nova.compute.manager [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Terminating instance [ 1543.888231] env[61573]: DEBUG nova.compute.manager [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1543.888355] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1543.889038] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-654f5060-6fa4-4b1e-87e8-154963feb2fb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.893792] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1543.906211] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a33a7c-59f6-43e7-907b-0c7a38703f74 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.940804] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b1392e68-7dc9-4399-88a6-9463e06917b4 could not be found. [ 1543.941112] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1543.941350] env[61573]: INFO nova.compute.manager [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Took 0.05 seconds to destroy the instance on the hypervisor. 
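The repeated "Inventory has not changed for provider ... based on inventory data: {...}" entries reflect a compare-before-update step: the inventory dict reported for the compute node is checked against what the provider tree already holds, and the Placement update is skipped when nothing differs. A hedged sketch of that check, with illustrative names rather than Nova's actual ProviderTree/report-client code:

# Sketch: skip the Placement update when inventory is unchanged.
def update_inventory_if_changed(cached, reported, push_to_placement):
    """cached/reported are dicts keyed by resource class, e.g.
    {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, ...}, ...}.
    Returns True when an update was actually sent."""
    if cached == reported:
        # log equivalent: "Inventory has not changed in ProviderTree ..."
        return False
    push_to_placement(reported)
    return True

# Example using the VCPU record shown in the log:
cached = {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                   'step_size': 1, 'allocation_ratio': 4.0}}
assert update_inventory_if_changed(cached, dict(cached), lambda inv: None) is False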
[ 1543.941646] env[61573]: DEBUG oslo.service.loopingcall [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1543.942105] env[61573]: DEBUG nova.compute.manager [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1543.942271] env[61573]: DEBUG nova.network.neutron [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1543.965545] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.965808] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.967536] env[61573]: INFO nova.compute.claims [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1543.980699] env[61573]: DEBUG nova.network.neutron [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.003571] env[61573]: INFO nova.compute.manager [-] [instance: b1392e68-7dc9-4399-88a6-9463e06917b4] Took 0.06 seconds to deallocate network for instance. 
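The loopingcall entry above waits on _deallocate_network_with_retries, i.e. the network cleanup is wrapped in a bounded retry loop rather than called once. A generic sketch of such a wrapper, assuming simple fixed-delay retries (not the oslo.service loopingcall implementation):

# Illustrative retry wrapper for the "_deallocate_network_with_retries"
# pattern referenced in the log.
import time

def call_with_retries(func, attempts=3, delay=1.0, retryable=(Exception,)):
    """Call func(); on a retryable failure wait and try again,
    re-raising after the final attempt."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except retryable:
            if attempt == attempts:
                raise
            time.sleep(delay)

# e.g. call_with_retries(lambda: deallocate_network(instance), attempts=3)
# where deallocate_network/instance stand in for the real cleanup call.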
[ 1544.098161] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7b083260-ad36-427d-a415-ac26a53c6805 tempest-ServerPasswordTestJSON-394852036 tempest-ServerPasswordTestJSON-394852036-project-member] Lock "b1392e68-7dc9-4399-88a6-9463e06917b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.212082] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d501e15-3e9d-4495-8c96-3b448ddd0686 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.221043] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4036e733-ec2e-485e-8f44-6e031bdff67d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.252795] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d169dc-652b-44a7-b91c-bf2fcc8aab03 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.260481] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2869fdc-39f3-43cc-9a61-ab88667b3426 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.273984] env[61573]: DEBUG nova.compute.provider_tree [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.284412] env[61573]: DEBUG nova.scheduler.client.report [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1544.300890] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.301416] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1544.334576] env[61573]: DEBUG nova.compute.utils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1544.336038] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1544.336225] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1544.346901] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1544.415717] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1544.444593] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.444844] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.445096] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.445319] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.445478] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.445630] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.445840] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.446009] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.446185] env[61573]: DEBUG nova.virt.hardware [None 
req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.446350] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.446558] env[61573]: DEBUG nova.virt.hardware [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.447490] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5023732-8740-4806-9f84-ce9e76f65068 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.456416] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acccbf12-402f-4a1e-ab34-a3fc697e7796 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.639397] env[61573]: DEBUG nova.policy [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31cd6718df1c44f3ba30eb71f36cdb69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8caf6003840413c8eff7d84d9b185cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1545.039325] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Successfully created port: befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.770493] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Successfully updated port: befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.797487] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.797635] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 
tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.797785] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1545.871821] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1545.910442] env[61573]: DEBUG nova.compute.manager [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Received event network-vif-plugged-befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1545.910708] env[61573]: DEBUG oslo_concurrency.lockutils [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] Acquiring lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.910920] env[61573]: DEBUG oslo_concurrency.lockutils [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.911105] env[61573]: DEBUG oslo_concurrency.lockutils [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.911285] env[61573]: DEBUG nova.compute.manager [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] No waiting events found dispatching network-vif-plugged-befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1545.911451] env[61573]: WARNING nova.compute.manager [req-c416cd22-5388-43af-8390-7631340388fb req-fe3677f9-8786-4957-b59e-63d9dcfebe72 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Received unexpected event network-vif-plugged-befb57e6-1778-4d27-98ae-2f7611144dd2 for instance with vm_state building and task_state spawning. 
[ 1546.164106] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Updating instance_info_cache with network_info: [{"id": "befb57e6-1778-4d27-98ae-2f7611144dd2", "address": "fa:16:3e:05:7c:c0", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefb57e6-17", "ovs_interfaceid": "befb57e6-1778-4d27-98ae-2f7611144dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.181910] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.182369] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance network_info: |[{"id": "befb57e6-1778-4d27-98ae-2f7611144dd2", "address": "fa:16:3e:05:7c:c0", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefb57e6-17", "ovs_interfaceid": "befb57e6-1778-4d27-98ae-2f7611144dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1546.183063] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:7c:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'befb57e6-1778-4d27-98ae-2f7611144dd2', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.191612] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating folder: Project (d8caf6003840413c8eff7d84d9b185cb). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1546.192331] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8724c79f-583c-43d4-990e-f052c72f565c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.205160] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created folder: Project (d8caf6003840413c8eff7d84d9b185cb) in parent group-v942801. [ 1546.205390] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating folder: Instances. Parent ref: group-v942893. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1546.205728] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf22ad0e-a172-4f01-972f-0bfd2b7f229b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.214851] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created folder: Instances in parent group-v942893. [ 1546.215110] env[61573]: DEBUG oslo.service.loopingcall [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.215299] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1546.215508] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-490f0613-0414-45ce-9c6a-2536f1371ed4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.234697] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.234697] env[61573]: value = "task-4836873" [ 1546.234697] env[61573]: _type = "Task" [ 1546.234697] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.242419] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836873, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.744963] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836873, 'name': CreateVM_Task, 'duration_secs': 0.293086} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.745176] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1546.745853] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.746031] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.746375] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.746666] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3eb0580-8a23-4132-a81a-e485188bc1dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.751635] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 1546.751635] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525a91d0-2423-3940-6ea8-8af7343bdb02" [ 1546.751635] env[61573]: _type = "Task" [ 1546.751635] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.760320] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]525a91d0-2423-3940-6ea8-8af7343bdb02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.261581] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.261933] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.262032] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.934287] env[61573]: DEBUG nova.compute.manager [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Received event network-changed-befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1547.934539] env[61573]: DEBUG nova.compute.manager [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Refreshing instance network info cache due to event network-changed-befb57e6-1778-4d27-98ae-2f7611144dd2. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1547.934725] env[61573]: DEBUG oslo_concurrency.lockutils [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] Acquiring lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.934810] env[61573]: DEBUG oslo_concurrency.lockutils [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] Acquired lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.934966] env[61573]: DEBUG nova.network.neutron [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Refreshing network info cache for port befb57e6-1778-4d27-98ae-2f7611144dd2 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1548.251898] env[61573]: DEBUG nova.network.neutron [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Updated VIF entry in instance network info cache for port befb57e6-1778-4d27-98ae-2f7611144dd2. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1548.252284] env[61573]: DEBUG nova.network.neutron [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Updating instance_info_cache with network_info: [{"id": "befb57e6-1778-4d27-98ae-2f7611144dd2", "address": "fa:16:3e:05:7c:c0", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefb57e6-17", "ovs_interfaceid": "befb57e6-1778-4d27-98ae-2f7611144dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.262481] env[61573]: DEBUG oslo_concurrency.lockutils [req-7b0dcec7-d555-46d4-a76d-05091b96f6a6 req-c3221f9c-b451-4851-8c55-6455343f2291 service nova] Releasing lock "refresh_cache-d272f481-f590-46e9-9f51-0c7601ff34ce" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.871488] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.538420] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.199623] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d272f481-f590-46e9-9f51-0c7601ff34ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.400860] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.403815] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.405015] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.405381] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1591.405381] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1591.426835] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427012] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427129] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427255] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427377] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427498] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427646] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427778] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.427899] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.428022] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1591.428146] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1592.403761] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.404128] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.417451] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.417806] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.417944] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.418125] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1592.419473] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60805be4-224a-4e41-a794-2b8179bb9e30 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.428423] env[61573]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c57380-0593-4421-a98a-e4a0bd699205 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.444726] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d29b21-fa66-410d-b728-ce7196742b5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.453188] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd1af1a-d6de-4097-945d-28e9b15798bc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.491568] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180544MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1592.491742] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.491946] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.609938] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 59913660-3644-41f2-a422-f814bd69b4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610186] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610357] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 1605911c-cf22-4206-b911-92b2a137dc84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610486] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610608] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610725] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.610875] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.611033] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.611156] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.611270] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1592.623570] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1592.636881] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance bb3d67c0-0686-46cb-8670-6cf7267790f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1592.647969] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d50453ba-f4b2-4bd9-8545-c123f8f31878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1592.658206] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance f29eb708-7f1b-4d3c-b932-ca9a1b346b3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1592.658448] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1592.658611] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '68', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'io_workload': '10', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_8df0e09d5e0344a78c770ce0fb3392ad': '1', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1592.695960] env[61573]: WARNING oslo_vmware.rw_handles [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1592.695960] env[61573]: ERROR oslo_vmware.rw_handles [ 1592.696335] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1592.698525] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1592.698786] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Copying Virtual Disk [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/dc4a2df8-7bef-4e84-9d6c-57f2ab6daf5e/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1592.699080] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2403854-1b70-459d-a93b-bec27e63fe47 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.708695] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for the task: (returnval){ [ 1592.708695] env[61573]: value = "task-4836874" [ 1592.708695] env[61573]: _type = "Task" [ 1592.708695] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.717271] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Task: {'id': task-4836874, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.881156] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ff4d2e-b650-4173-a1cc-2f36fe2fe416 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.889291] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba073d4-aaa0-4624-bfa2-5ed0d78b7d14 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.922372] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247ca0df-3283-4019-9acf-ace2d9e58802 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.931321] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3909cab1-ef89-46ac-a4d5-de0c00eeea32 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.945773] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.956711] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1592.975227] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1592.975227] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.481s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.218353] env[61573]: DEBUG oslo_vmware.exceptions [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1593.218621] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.219209] env[61573]: ERROR nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1593.219209] env[61573]: Faults: ['InvalidArgument'] [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Traceback (most recent call last): [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] yield resources [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self.driver.spawn(context, instance, image_meta, [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self._fetch_image_if_missing(context, vi) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] image_cache(vi, tmp_image_ds_loc) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] vm_util.copy_virtual_disk( [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] session._wait_for_task(vmdk_copy_task) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return self.wait_for_task(task_ref) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return evt.wait() [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] result = hub.switch() [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return self.greenlet.switch() [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self.f(*self.args, **self.kw) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] raise exceptions.translate_fault(task_info.error) [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Faults: ['InvalidArgument'] [ 1593.219209] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] [ 1593.220268] env[61573]: INFO nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Terminating instance [ 1593.221185] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.221336] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1593.221575] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc9718c5-4d0b-42e5-b3be-e334ce86dc95 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.223809] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.223968] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.224149] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1593.231305] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1593.231489] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1593.232639] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a8c8930-e599-45c4-87ff-b770314d0e3c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.240044] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 1593.240044] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52442530-1fbc-6677-de6e-3e59481420c5" [ 1593.240044] env[61573]: _type = "Task" [ 1593.240044] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.255042] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1593.256762] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1593.256993] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1593.257243] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f408cb68-9224-4655-9e65-7d9da38e0d48 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.279365] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1593.279565] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Fetch image to [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1593.279753] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1593.280521] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac40e32-f9bc-4b57-aa74-e8fa8a265d3d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.287373] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5244fc-d9f4-4867-a828-22e0f2a8a612 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.297251] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445e3628-759a-4eeb-81e1-389d62b6835e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.329473] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-75ec367c-5612-49e5-ae2f-fd17b1531f6c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.336666] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.338192] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9628a4d6-d641-493d-8bc3-ca1b74838920 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.346339] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Releasing lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.346847] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1593.347105] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1593.348231] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f7e148-3f7c-436b-8caa-fec1936a3dc6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.356293] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1593.356532] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d041113-12cf-46fe-b356-b72d3e16cde4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.359549] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1593.390714] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Unregistered the VM {{(pid=61573) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1593.391011] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1593.391221] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Deleting the datastore file [datastore2] 1605911c-cf22-4206-b911-92b2a137dc84 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1593.391503] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ece6f56e-b819-49dc-914b-ed663a5c6adb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.398650] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for the task: (returnval){ [ 1593.398650] env[61573]: value = "task-4836876" [ 1593.398650] env[61573]: _type = "Task" [ 1593.398650] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.409014] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Task: {'id': task-4836876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.418475] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1593.477894] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1593.478066] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1593.908752] env[61573]: DEBUG oslo_vmware.api [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Task: {'id': task-4836876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036666} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.909092] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1593.909289] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1593.909460] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1593.909634] env[61573]: INFO nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1593.909941] env[61573]: DEBUG oslo.service.loopingcall [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.910163] env[61573]: DEBUG nova.compute.manager [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1593.912357] env[61573]: DEBUG nova.compute.claims [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1593.912522] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.912732] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.968264] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.968561] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.968679] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.968843] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1594.146956] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892a0f1b-e2ed-4fb4-9a85-f2a9d6b86bbf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.155078] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e26c57-33c1-4647-beeb-fd294db19cf0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.187278] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d482fd-04f5-4c05-9be8-a792b082a0a2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.195611] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631a6ea3-d302-4f30-81fe-fa7f9f5e1c8a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.209763] env[61573]: DEBUG nova.compute.provider_tree [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1594.221044] env[61573]: DEBUG nova.scheduler.client.report [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1594.235831] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.236409] env[61573]: ERROR nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.236409] env[61573]: Faults: ['InvalidArgument'] [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Traceback (most recent call last): [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 
1605911c-cf22-4206-b911-92b2a137dc84] self.driver.spawn(context, instance, image_meta, [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self._fetch_image_if_missing(context, vi) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] image_cache(vi, tmp_image_ds_loc) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] vm_util.copy_virtual_disk( [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] session._wait_for_task(vmdk_copy_task) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return self.wait_for_task(task_ref) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return evt.wait() [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] result = hub.switch() [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] return self.greenlet.switch() [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] self.f(*self.args, **self.kw) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] raise exceptions.translate_fault(task_info.error) [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Faults: ['InvalidArgument'] [ 1594.236409] env[61573]: ERROR nova.compute.manager [instance: 1605911c-cf22-4206-b911-92b2a137dc84] [ 1594.237290] env[61573]: DEBUG nova.compute.utils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1594.238784] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Build of instance 1605911c-cf22-4206-b911-92b2a137dc84 was re-scheduled: A specified parameter was not correct: fileType [ 1594.238784] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1594.239173] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1594.239402] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.239552] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.239768] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1594.268325] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1594.344371] env[61573]: DEBUG nova.network.neutron [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.354675] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Releasing lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.354889] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1594.355093] env[61573]: DEBUG nova.compute.manager [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Skipping network deallocation for instance since networking was not requested. {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1594.449051] env[61573]: INFO nova.scheduler.client.report [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Deleted allocations for instance 1605911c-cf22-4206-b911-92b2a137dc84 [ 1594.475387] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7297e431-71d3-4f3b-b93b-a15b41b0473b tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 562.533s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.476548] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 365.676s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.476785] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "1605911c-cf22-4206-b911-92b2a137dc84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.477315] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84-events" acquired 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.477315] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.479166] env[61573]: INFO nova.compute.manager [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Terminating instance [ 1594.481174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquiring lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.481330] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Acquired lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.481501] env[61573]: DEBUG nova.network.neutron [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1594.490354] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1594.510024] env[61573]: DEBUG nova.network.neutron [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1594.542369] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.542369] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.543828] env[61573]: INFO nova.compute.claims [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1594.580079] env[61573]: DEBUG nova.network.neutron [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.589609] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Releasing lock "refresh_cache-1605911c-cf22-4206-b911-92b2a137dc84" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.589997] env[61573]: DEBUG nova.compute.manager [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1594.590203] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1594.590688] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bdf1a8c-e502-4b21-9a33-10cf4c431e7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.601659] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d369f8-fc3d-45fd-a7db-74a4d0ecd4e7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.637081] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1605911c-cf22-4206-b911-92b2a137dc84 could not be found. [ 1594.637305] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1594.637483] env[61573]: INFO nova.compute.manager [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1594.637749] env[61573]: DEBUG oslo.service.loopingcall [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.640344] env[61573]: DEBUG nova.compute.manager [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1594.640498] env[61573]: DEBUG nova.network.neutron [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1594.658315] env[61573]: DEBUG nova.network.neutron [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1594.667122] env[61573]: DEBUG nova.network.neutron [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.680210] env[61573]: INFO nova.compute.manager [-] [instance: 1605911c-cf22-4206-b911-92b2a137dc84] Took 0.04 seconds to deallocate network for instance. 
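The lock trio that recurs throughout these entries ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited", "Lock ... released ... :: held", all pointing at lockutils.py:402/407/421) is emitted by oslo.concurrency's synchronized wrapper around whatever function holds the lock. Below is a minimal sketch of that pattern only, assuming nothing beyond an installed oslo.concurrency; the function name, body, and call are hypothetical stand-ins for illustration, not Nova's actual resource-tracker or cache-refresh code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # The body runs only while the named in-process lock is held; the
    # "Acquiring lock ...", "acquired ... :: waited" and "released ... :: held"
    # DEBUG lines in the surrounding log are produced by the decorator's
    # wrapper (the inner() function in lockutils.py), not by this body.
    return instance_uuid

abort_instance_claim('1605911c-cf22-4206-b911-92b2a137dc84')

Cross-process locking would pass external=True (and optionally a lock_path) to the same decorator; the plain in-process form sketched here corresponds to the intra-service locks such as "compute_resources" and "refresh_cache-<uuid>" recorded in these entries.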
[ 1594.785146] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8ec7b7-5dfa-48ac-961f-90fd80811873 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.793574] env[61573]: DEBUG oslo_concurrency.lockutils [None req-190dc855-45a5-48ac-8fa3-c120c29796e7 tempest-ServerShowV254Test-2111009966 tempest-ServerShowV254Test-2111009966-project-member] Lock "1605911c-cf22-4206-b911-92b2a137dc84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.317s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.795233] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d283a9-52d8-47a2-b2bd-0207edabcad5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.828492] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49beab2-4f0e-4a9e-b0d9-71fbc5352287 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.837104] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656425db-7b32-4661-82a7-c30611cf2158 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.853161] env[61573]: DEBUG nova.compute.provider_tree [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1594.861764] env[61573]: DEBUG nova.scheduler.client.report [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1594.875316] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.875818] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1594.911037] env[61573]: DEBUG nova.compute.utils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1594.913056] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1594.913056] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1594.923187] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1594.981901] env[61573]: DEBUG nova.policy [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e732fd3fda94810ae67476e3decf066', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33f496dcb85142ae9da6ddf8e8e9e7c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1594.987745] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1595.012545] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1595.012781] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1595.012937] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1595.013133] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1595.013279] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1595.013423] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1595.013628] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1595.013786] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1595.014041] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1595.014215] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1595.014387] env[61573]: DEBUG nova.virt.hardware [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1595.015248] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef01775-6738-4136-bfd8-a8b274ba3451 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.024365] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69028959-843f-4ea7-b1ff-9ebfd5e1e88e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.382551] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Successfully created port: 5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1595.404108] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.404108] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.202185] env[61573]: DEBUG nova.compute.manager [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Received event network-vif-plugged-5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1596.202807] env[61573]: DEBUG oslo_concurrency.lockutils [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 service nova] Acquiring lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.203112] env[61573]: DEBUG oslo_concurrency.lockutils [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 
service nova] Lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.203228] env[61573]: DEBUG oslo_concurrency.lockutils [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 service nova] Lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.203397] env[61573]: DEBUG nova.compute.manager [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] No waiting events found dispatching network-vif-plugged-5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1596.203564] env[61573]: WARNING nova.compute.manager [req-626d45ef-b7fb-4e59-96bc-1293730e1548 req-2131af68-8863-4da9-b686-3c77f46283f2 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Received unexpected event network-vif-plugged-5c4b120a-e49e-4680-9c66-b33c4d881525 for instance with vm_state building and task_state spawning. [ 1596.302974] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Successfully updated port: 5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1596.314201] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.314353] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquired lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.314502] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1596.393493] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1596.586585] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Updating instance_info_cache with network_info: [{"id": "5c4b120a-e49e-4680-9c66-b33c4d881525", "address": "fa:16:3e:ac:3b:5d", "network": {"id": "cd630997-8650-43e2-9d62-ec505d87aa77", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-47003848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f496dcb85142ae9da6ddf8e8e9e7c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c4b120a-e4", "ovs_interfaceid": "5c4b120a-e49e-4680-9c66-b33c4d881525", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.603245] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Releasing lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.603567] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance network_info: |[{"id": "5c4b120a-e49e-4680-9c66-b33c4d881525", "address": "fa:16:3e:ac:3b:5d", "network": {"id": "cd630997-8650-43e2-9d62-ec505d87aa77", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-47003848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f496dcb85142ae9da6ddf8e8e9e7c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c4b120a-e4", "ovs_interfaceid": "5c4b120a-e49e-4680-9c66-b33c4d881525", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1596.603999] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:3b:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2321dbbe-f64a-4253-a462-21676f8a278e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c4b120a-e49e-4680-9c66-b33c4d881525', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1596.611690] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Creating folder: Project (33f496dcb85142ae9da6ddf8e8e9e7c9). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1596.612397] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1fce6458-1995-4e1b-8088-3d6353d539aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.623298] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Created folder: Project (33f496dcb85142ae9da6ddf8e8e9e7c9) in parent group-v942801. [ 1596.623489] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Creating folder: Instances. Parent ref: group-v942896. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1596.623726] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc94c4af-8015-421a-af11-36500560f999 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.632229] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Created folder: Instances in parent group-v942896. [ 1596.632466] env[61573]: DEBUG oslo.service.loopingcall [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.632646] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1596.632852] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f83dfcf-faa0-4aca-b115-79919b3812a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.652504] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1596.652504] env[61573]: value = "task-4836879" [ 1596.652504] env[61573]: _type = "Task" [ 1596.652504] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.662946] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836879, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.164199] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836879, 'name': CreateVM_Task, 'duration_secs': 0.283884} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.164372] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1597.164989] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.165174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.165488] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1597.165730] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d808b65-7480-4000-b3e1-8ffb547e86ef {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.170370] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for the task: (returnval){ [ 1597.170370] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5268f66f-7fcc-901d-6bf0-45f58e2bfd86" [ 1597.170370] env[61573]: _type = "Task" [ 1597.170370] 
env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.179195] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5268f66f-7fcc-901d-6bf0-45f58e2bfd86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.682205] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.682585] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1597.682727] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.230468] env[61573]: DEBUG nova.compute.manager [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Received event network-changed-5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1598.230537] env[61573]: DEBUG nova.compute.manager [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Refreshing instance network info cache due to event network-changed-5c4b120a-e49e-4680-9c66-b33c4d881525. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1598.230831] env[61573]: DEBUG oslo_concurrency.lockutils [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] Acquiring lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.230888] env[61573]: DEBUG oslo_concurrency.lockutils [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] Acquired lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.231070] env[61573]: DEBUG nova.network.neutron [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Refreshing network info cache for port 5c4b120a-e49e-4680-9c66-b33c4d881525 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1598.534440] env[61573]: DEBUG nova.network.neutron [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Updated VIF entry in instance network info cache for port 5c4b120a-e49e-4680-9c66-b33c4d881525. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1598.534842] env[61573]: DEBUG nova.network.neutron [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Updating instance_info_cache with network_info: [{"id": "5c4b120a-e49e-4680-9c66-b33c4d881525", "address": "fa:16:3e:ac:3b:5d", "network": {"id": "cd630997-8650-43e2-9d62-ec505d87aa77", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-47003848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f496dcb85142ae9da6ddf8e8e9e7c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c4b120a-e4", "ovs_interfaceid": "5c4b120a-e49e-4680-9c66-b33c4d881525", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.545453] env[61573]: DEBUG oslo_concurrency.lockutils [req-b5c2e310-c7c1-4fea-9000-62ae02d8c157 req-77ad1bef-bc76-48d9-b640-604af014a9c5 service nova] Releasing lock "refresh_cache-c96755a9-1e1c-42ed-a170-35914ef05333" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.807681] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 
tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "c96755a9-1e1c-42ed-a170-35914ef05333" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.637381] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.637775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.055708] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "04d29fd0-acd8-407a-8b53-341c78e7b341" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.056063] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.465856] env[61573]: WARNING oslo_vmware.rw_handles [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1643.465856] env[61573]: ERROR 
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1643.465856] env[61573]: ERROR oslo_vmware.rw_handles [ 1643.466655] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1643.468353] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1643.468642] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Copying Virtual Disk [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/7da70aed-3a47-4728-bd1b-3e3852740c7a/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1643.468942] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-737a0eba-7680-417d-b333-7d97c536cd87 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.477464] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 1643.477464] env[61573]: value = "task-4836880" [ 1643.477464] env[61573]: _type = "Task" [ 1643.477464] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.486539] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836880, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.987648] env[61573]: DEBUG oslo_vmware.exceptions [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1643.987941] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.988523] env[61573]: ERROR nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.988523] env[61573]: Faults: ['InvalidArgument'] [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Traceback (most recent call last): [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] yield resources [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self.driver.spawn(context, instance, image_meta, [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self._fetch_image_if_missing(context, vi) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] image_cache(vi, tmp_image_ds_loc) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] vm_util.copy_virtual_disk( [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] session._wait_for_task(vmdk_copy_task) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return self.wait_for_task(task_ref) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return evt.wait() [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] result = hub.switch() [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return self.greenlet.switch() [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self.f(*self.args, **self.kw) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] raise exceptions.translate_fault(task_info.error) [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Faults: ['InvalidArgument'] [ 1643.988523] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] [ 1643.989455] env[61573]: INFO nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Terminating instance [ 1643.990466] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.990638] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.990880] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de846573-b8d0-40cb-85df-ff71edeea042 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.993391] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1643.993575] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1643.994318] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a0196a-8525-4a3f-b67a-bc4197505c58 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.001566] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1644.001799] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c64ad8d-899a-4e2e-b09e-e233498b4141 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.004111] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.004284] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1644.005260] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dac4ed5c-5039-4b04-b4f5-6f75d7f75e7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.010716] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 1644.010716] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b31e5c-12aa-b90e-dea6-04eaa6fcaf63" [ 1644.010716] env[61573]: _type = "Task" [ 1644.010716] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.019196] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b31e5c-12aa-b90e-dea6-04eaa6fcaf63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.081476] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1644.081706] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1644.081871] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleting the datastore file [datastore2] 59913660-3644-41f2-a422-f814bd69b4a1 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1644.082166] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be452da0-4f6f-48d9-8bc6-62f2c991fcb0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.088867] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 1644.088867] env[61573]: value = "task-4836882" [ 1644.088867] env[61573]: _type = "Task" [ 1644.088867] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.097240] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.521475] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1644.521849] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.521991] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41244dab-0a42-45a6-b56d-8c0cdf70fb92 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.534367] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.534577] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Fetch image to [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1644.534871] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1644.535696] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5106f8-8984-4e65-8974-633cd5ed588a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.543660] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ba747b-0bda-44c0-b91a-9f92c47bb845 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.552622] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b66e621-d1c5-48fa-9fcd-38a1bf173621 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.584252] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a915dde-1345-4d20-922d-8207aa2760c9 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.593706] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7e4eb2a8-f88d-4f43-b0bf-12c7f004133d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.600678] env[61573]: DEBUG oslo_vmware.api [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077749} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.600920] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.601142] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1644.601312] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1644.601483] env[61573]: INFO nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Took 0.61 seconds to destroy the instance on the hypervisor. 
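The CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all come from the same oslo.vmware invoke-then-wait pattern: a vSphere method that returns a Task managed object is invoked, wait_for_task() then polls it (the "progress is 0%" lines) and turns a failed task into an exception such as the VimFaultException with Faults: ['InvalidArgument'] recorded earlier. The following is only a minimal standalone sketch of that pattern; the vCenter endpoint, credentials and datastore path are placeholders, and this is not the Nova driver code itself.

    from oslo_vmware import api
    from oslo_vmware import exceptions as vmw_exc

    # Placeholder vCenter endpoint and credentials; the poll interval roughly
    # matches the ~0.5s cadence between the _poll_task entries in this log.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Invoke a vSphere method that returns a Task moref.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                              name='[datastore2] vmware_temp/example-to-delete',
                              datacenter=None)  # a real call passes a Datacenter moref here

    try:
        # wait_for_task() polls the task until it completes and raises a translated
        # exception (e.g. VimFaultException) if the task ends in an error state.
        task_info = session.wait_for_task(task)
        print('task finished:', task_info.state)
    except vmw_exc.VimFaultException as exc:
        # fault_list carries fault class names such as ['InvalidArgument'].
        print('task failed:', exc.fault_list, str(exc))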
[ 1644.603723] env[61573]: DEBUG nova.compute.claims [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1644.603897] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.604150] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.615507] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1644.671357] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1644.731021] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1644.731133] env[61573]: DEBUG oslo_vmware.rw_handles [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1644.885799] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934fa472-fb9d-4594-81c5-5a9f63c2fa5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.894355] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76189c0-1151-43b0-8f22-548b7a1566e6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.925045] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c40d7a-4772-44e7-b287-db3a84076d4e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.932956] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b27a8bf-f385-4b08-9c01-bc2c3badecd9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.947115] env[61573]: DEBUG nova.compute.provider_tree [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.958755] env[61573]: DEBUG nova.scheduler.client.report [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.974398] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.370s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.974929] env[61573]: ERROR nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.974929] env[61573]: Faults: ['InvalidArgument'] [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Traceback (most recent call last): [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1644.974929] 
env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self.driver.spawn(context, instance, image_meta, [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self._fetch_image_if_missing(context, vi) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] image_cache(vi, tmp_image_ds_loc) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] vm_util.copy_virtual_disk( [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] session._wait_for_task(vmdk_copy_task) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return self.wait_for_task(task_ref) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return evt.wait() [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] result = hub.switch() [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] return self.greenlet.switch() [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] self.f(*self.args, **self.kw) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] raise exceptions.translate_fault(task_info.error) [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Faults: ['InvalidArgument'] [ 1644.974929] env[61573]: ERROR nova.compute.manager [instance: 59913660-3644-41f2-a422-f814bd69b4a1] [ 1644.975800] env[61573]: DEBUG nova.compute.utils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1644.977122] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Build of instance 59913660-3644-41f2-a422-f814bd69b4a1 was re-scheduled: A specified parameter was not correct: fileType [ 1644.977122] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1644.977496] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1644.977668] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1644.977847] env[61573]: DEBUG nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1644.978031] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1645.313875] env[61573]: DEBUG nova.network.neutron [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.326134] env[61573]: INFO nova.compute.manager [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Took 0.35 seconds to deallocate network for instance. [ 1645.434271] env[61573]: INFO nova.scheduler.client.report [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted allocations for instance 59913660-3644-41f2-a422-f814bd69b4a1 [ 1645.457346] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ce3703d2-0eb2-4781-82e0-2241e79d60b0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.772s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.458566] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.272s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.458955] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "59913660-3644-41f2-a422-f814bd69b4a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.459050] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.459172] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.461782] env[61573]: INFO nova.compute.manager [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Terminating instance [ 1645.463642] env[61573]: DEBUG nova.compute.manager [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1645.463843] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1645.464333] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f299ed15-cd5b-4487-8e09-44ba96824dcb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.474023] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a93f6a1-c2c5-45ac-abfc-45d4d2219bc3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.485983] env[61573]: DEBUG nova.compute.manager [None req-46ace650-85a4-4527-82d5-06f1a1c1a4fc tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: bb3d67c0-0686-46cb-8670-6cf7267790f6] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1645.510952] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59913660-3644-41f2-a422-f814bd69b4a1 could not be found. 
[ 1645.511261] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1645.511487] env[61573]: INFO nova.compute.manager [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1645.511664] env[61573]: DEBUG oslo.service.loopingcall [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.511933] env[61573]: DEBUG nova.compute.manager [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1645.512144] env[61573]: DEBUG nova.network.neutron [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1645.523814] env[61573]: DEBUG nova.compute.manager [None req-46ace650-85a4-4527-82d5-06f1a1c1a4fc tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: bb3d67c0-0686-46cb-8670-6cf7267790f6] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1645.545940] env[61573]: DEBUG oslo_concurrency.lockutils [None req-46ace650-85a4-4527-82d5-06f1a1c1a4fc tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "bb3d67c0-0686-46cb-8670-6cf7267790f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.106s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.546086] env[61573]: DEBUG nova.network.neutron [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.554443] env[61573]: INFO nova.compute.manager [-] [instance: 59913660-3644-41f2-a422-f814bd69b4a1] Took 0.04 seconds to deallocate network for instance. [ 1645.559591] env[61573]: DEBUG nova.compute.manager [None req-ed93f987-8820-4de0-8e6a-095f26c86fe2 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: d50453ba-f4b2-4bd9-8545-c123f8f31878] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1645.586873] env[61573]: DEBUG nova.compute.manager [None req-ed93f987-8820-4de0-8e6a-095f26c86fe2 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] [instance: d50453ba-f4b2-4bd9-8545-c123f8f31878] Instance disappeared before build. 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1645.609243] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ed93f987-8820-4de0-8e6a-095f26c86fe2 tempest-AttachInterfacesTestJSON-1820478386 tempest-AttachInterfacesTestJSON-1820478386-project-member] Lock "d50453ba-f4b2-4bd9-8545-c123f8f31878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.336s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.621617] env[61573]: DEBUG nova.compute.manager [None req-8c2012a9-f361-43fb-b999-6c174fee98db tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: f29eb708-7f1b-4d3c-b932-ca9a1b346b3e] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1645.679038] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0501e1b9-e716-4a3f-a396-3e49babef070 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "59913660-3644-41f2-a422-f814bd69b4a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.220s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.684326] env[61573]: DEBUG nova.compute.manager [None req-8c2012a9-f361-43fb-b999-6c174fee98db tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: f29eb708-7f1b-4d3c-b932-ca9a1b346b3e] Instance disappeared before build. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1645.704657] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8c2012a9-f361-43fb-b999-6c174fee98db tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "f29eb708-7f1b-4d3c-b932-ca9a1b346b3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.948s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.714999] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1645.773462] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.773736] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.775414] env[61573]: INFO nova.compute.claims [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1646.025422] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ae3019-c308-478c-b9cd-4060f247f4af {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.033036] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a834dfcc-26d9-4120-8307-54379b137693 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.063734] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c391213b-3964-4cd1-8693-88bf920c4fbd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.071770] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddad40b-165a-4c9b-af2e-840911c7cd5f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.085303] env[61573]: DEBUG nova.compute.provider_tree [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.094900] env[61573]: DEBUG nova.scheduler.client.report [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1646.109034] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.109549] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1646.148713] env[61573]: DEBUG nova.compute.utils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1646.150058] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1646.150265] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1646.158833] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1646.209245] env[61573]: DEBUG nova.policy [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e209cfe158004e46a9693c62a5c2e3f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90325af48fc44451a6c15e089107271a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1646.224670] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1646.252368] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1646.252731] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1646.252940] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1646.253187] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1646.253344] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1646.253492] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1646.253711] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1646.253896] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1646.254049] 
env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1646.254217] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1646.254398] env[61573]: DEBUG nova.virt.hardware [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1646.255371] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150494c8-882c-4136-bb2f-a5cb59be06da {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.263663] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47eedc81-1e82-4e22-83b2-22ac55d4910d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.574881] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Successfully created port: d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1647.353359] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Successfully updated port: d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1647.367929] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.368107] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.368277] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1647.371292] env[61573]: DEBUG 
nova.compute.manager [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Received event network-vif-plugged-d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1647.371541] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] Acquiring lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.371760] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.371860] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.372047] env[61573]: DEBUG nova.compute.manager [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] No waiting events found dispatching network-vif-plugged-d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1647.372261] env[61573]: WARNING nova.compute.manager [req-ce68ea08-4d6d-4c71-ba95-1505a04a4ab8 req-86ac92d1-79ee-4576-b234-cceb78af6bcd service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Received unexpected event network-vif-plugged-d41c2247-2d2c-42f8-ace2-ae01e43c558d for instance with vm_state building and task_state spawning. [ 1647.437457] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1647.646299] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Updating instance_info_cache with network_info: [{"id": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "address": "fa:16:3e:bc:e1:73", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd41c2247-2d", "ovs_interfaceid": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.659492] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.659634] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance network_info: |[{"id": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "address": "fa:16:3e:bc:e1:73", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd41c2247-2d", "ovs_interfaceid": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1647.660047] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:e1:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd41c2247-2d2c-42f8-ace2-ae01e43c558d', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.667771] env[61573]: DEBUG oslo.service.loopingcall [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.668390] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1647.668635] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b1ad37-d48d-4a3d-a40e-983e5c129cf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.690394] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.690394] env[61573]: value = "task-4836883" [ 1647.690394] env[61573]: _type = "Task" [ 1647.690394] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.700230] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836883, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.202538] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836883, 'name': CreateVM_Task, 'duration_secs': 0.303437} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.202751] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1648.203480] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.203689] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.204074] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1648.204339] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe8f5733-05ff-4637-899e-aa70cb27ecc9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.209423] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 1648.209423] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b5b6e7-faaf-d528-268e-dcc930d45297" [ 1648.209423] env[61573]: _type = "Task" [ 1648.209423] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.217589] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b5b6e7-faaf-d528-268e-dcc930d45297, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.722390] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.722720] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.722866] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.396995] env[61573]: DEBUG nova.compute.manager [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Received event network-changed-d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1649.397226] env[61573]: DEBUG nova.compute.manager [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Refreshing instance network info cache due to event network-changed-d41c2247-2d2c-42f8-ace2-ae01e43c558d. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1649.397436] env[61573]: DEBUG oslo_concurrency.lockutils [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] Acquiring lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.397580] env[61573]: DEBUG oslo_concurrency.lockutils [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] Acquired lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.397739] env[61573]: DEBUG nova.network.neutron [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Refreshing network info cache for port d41c2247-2d2c-42f8-ace2-ae01e43c558d {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1649.403030] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.911845] env[61573]: DEBUG nova.network.neutron [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Updated VIF entry in instance network info cache for port d41c2247-2d2c-42f8-ace2-ae01e43c558d. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1649.912246] env[61573]: DEBUG nova.network.neutron [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Updating instance_info_cache with network_info: [{"id": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "address": "fa:16:3e:bc:e1:73", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd41c2247-2d", "ovs_interfaceid": "d41c2247-2d2c-42f8-ace2-ae01e43c558d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.922621] env[61573]: DEBUG oslo_concurrency.lockutils [req-4c44226d-829d-4b00-8d84-977f9a2528bc req-49134956-d5e6-4b3a-bd43-80620b1e2c0c service nova] Releasing lock "refresh_cache-6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" {{(pid=61573) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.404751] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.404751] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1652.405175] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1652.429266] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.429460] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.429670] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.429732] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.429857] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.429982] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.430156] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.430446] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.430839] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.431031] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1652.431227] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1653.403991] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.404280] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.418150] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.418150] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.418541] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.418541] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1653.419563] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e83001-53a0-42a9-a3ba-53654db35754 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.428535] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbf992b-333b-4aa7-9009-208bf884fcf0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.443705] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60edb2f2-6a81-4c82-8610-f3441f09aefa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.450757] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b33b2cb-3e4b-4734-8d55-b39ed435b45d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.479811] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180552MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1653.480104] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.480206] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.582427] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.582638] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.582781] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.582905] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583088] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583254] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583379] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583496] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583612] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.583756] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1653.595366] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1653.595538] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1653.595699] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '73', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'io_workload': '10', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_task_spawning': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1653.740102] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e79709d-c531-4ab3-a70a-aab90b9521d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.748200] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459c7f54-b6e1-4bc7-82c6-544d52182cf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.778930] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc86953a-91e6-47dc-af46-f53ce0ec83dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.787568] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b889659-3a84-46db-89fb-fe928a1d035d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.800936] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1653.809817] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1653.825242] env[61573]: DEBUG nova.compute.resource_tracker [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1653.825441] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.345s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.820832] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.821206] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.821258] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.821415] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.821566] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.821711] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1693.486719] env[61573]: WARNING oslo_vmware.rw_handles [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1693.486719] env[61573]: ERROR oslo_vmware.rw_handles [ 1693.486719] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1693.488908] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1693.489158] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Copying Virtual Disk [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/7e5239b2-1ff1-4d4a-bcc5-5ec5d3e85568/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1693.489446] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7081880c-9c1a-458c-abf9-9d3042102247 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.498343] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 1693.498343] env[61573]: value = "task-4836884" [ 
1693.498343] env[61573]: _type = "Task" [ 1693.498343] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.509169] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.009520] env[61573]: DEBUG oslo_vmware.exceptions [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1694.009826] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.010422] env[61573]: ERROR nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1694.010422] env[61573]: Faults: ['InvalidArgument'] [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Traceback (most recent call last): [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] yield resources [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self.driver.spawn(context, instance, image_meta, [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self._fetch_image_if_missing(context, vi) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] image_cache(vi, tmp_image_ds_loc) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 
2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] vm_util.copy_virtual_disk( [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] session._wait_for_task(vmdk_copy_task) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return self.wait_for_task(task_ref) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return evt.wait() [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] result = hub.switch() [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return self.greenlet.switch() [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self.f(*self.args, **self.kw) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] raise exceptions.translate_fault(task_info.error) [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Faults: ['InvalidArgument'] [ 1694.010422] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] [ 1694.011366] env[61573]: INFO nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Terminating instance [ 1694.012397] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.012603] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1694.012844] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7267938-0205-40d6-b83b-880e90df3179 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.016239] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1694.016437] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1694.017234] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5554a37-0997-4d16-8735-1ef96a6f631e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.021422] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1694.022177] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1694.024071] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2916d07c-10fb-4241-ba7c-aae5772d79be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.026256] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1694.026480] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2cb484b-4459-4dd6-8a20-325c2e480974 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.030708] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1694.030708] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5265c3b3-974e-100c-de63-ca6821f633dd" [ 1694.030708] env[61573]: _type = "Task" [ 1694.030708] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.038823] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5265c3b3-974e-100c-de63-ca6821f633dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.099033] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1694.099033] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1694.099033] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleting the datastore file [datastore2] 2d5777f8-a431-43bd-8934-7cc33fd14718 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.099033] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a22b9508-26ae-4858-8297-b6eb0225477c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.105535] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 1694.105535] env[61573]: value = "task-4836886" [ 1694.105535] env[61573]: _type = "Task" [ 1694.105535] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.114070] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836886, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.541697] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1694.541697] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating directory with path [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1694.541697] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c54f333e-6c02-4b06-bf46-376c873b9092 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.553469] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Created directory with path [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1694.553674] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Fetch image to [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1694.553845] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1694.554627] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053ce6d0-126a-4cf2-8dcb-6b357a099cc1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.561306] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b92565-7355-4cbf-8004-faf39237d2a4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.570552] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa9be5e-3411-4516-8f88-d7611c6dd006 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.601677] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-247e0fbd-90f5-44f7-8dae-2e99af9cd871 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.611155] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4ea63856-f94c-42a1-8e49-6c1a9649d8a7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.619586] env[61573]: DEBUG oslo_vmware.api [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066553} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.619829] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1694.620017] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1694.620196] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1694.620371] env[61573]: INFO nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Took 0.60 seconds to destroy the instance on the hypervisor. 
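The DeleteDatastoreFile_Task entries just above ("Waiting for the task ... progress is 0% ... completed successfully", duration_secs 0.066553) show the task-polling pattern that recurs throughout this log, and the earlier traceback shows the failure side of the same loop, where a task ending in an error state is surfaced to the caller as VimFaultException ("A specified parameter was not correct: fileType"). Below is a minimal, self-contained sketch of such a polling loop; the names (poll_task_until_done, TaskError, the shape of the task-info object) are purely illustrative and are not the oslo.vmware API.

    import time


    class TaskError(Exception):
        """Raised when the polled task finishes in an error state (illustrative only)."""


    def poll_task_until_done(get_task_info, interval=0.5, timeout=300):
        """Poll a task-info callable until it reports success or error.

        get_task_info: callable returning an object with .state ('running',
        'success' or 'error') and .error (message or None). Hypothetical
        shape chosen for this sketch.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # As in the traceback above: a task that fails is translated
                # into an exception for the caller instead of a return value.
                raise TaskError(info.error)
            time.sleep(interval)
        raise TimeoutError("task did not complete within %s seconds" % timeout)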
[ 1694.622698] env[61573]: DEBUG nova.compute.claims [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1694.622892] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.623132] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.637210] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1694.831187] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1694.894068] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1694.894321] env[61573]: DEBUG oslo_vmware.rw_handles [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1694.927517] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58eb96e6-5fb2-4b51-a8de-99b6acaa4ea7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.935506] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8300b23b-9423-4023-ba96-06c9c68d6a94 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.965599] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d1e430-d77c-414f-9ee3-76521a0d1e18 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.973247] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4653c0be-4e15-4dc5-8573-1b8d11593c43 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.986037] env[61573]: DEBUG nova.compute.provider_tree [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.994624] env[61573]: DEBUG nova.scheduler.client.report [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1695.008921] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.386s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.009464] env[61573]: ERROR nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1695.009464] env[61573]: Faults: ['InvalidArgument'] [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Traceback (most recent call last): [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 
2d5777f8-a431-43bd-8934-7cc33fd14718] self.driver.spawn(context, instance, image_meta, [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self._fetch_image_if_missing(context, vi) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] image_cache(vi, tmp_image_ds_loc) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] vm_util.copy_virtual_disk( [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] session._wait_for_task(vmdk_copy_task) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return self.wait_for_task(task_ref) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return evt.wait() [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] result = hub.switch() [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] return self.greenlet.switch() [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] self.f(*self.args, **self.kw) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] raise exceptions.translate_fault(task_info.error) [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Faults: ['InvalidArgument'] [ 1695.009464] env[61573]: ERROR nova.compute.manager [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] [ 1695.010387] env[61573]: DEBUG nova.compute.utils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1695.011700] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Build of instance 2d5777f8-a431-43bd-8934-7cc33fd14718 was re-scheduled: A specified parameter was not correct: fileType [ 1695.011700] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1695.012090] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1695.012266] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1695.012439] env[61573]: DEBUG nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1695.012605] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1695.581212] env[61573]: DEBUG nova.network.neutron [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.593602] env[61573]: INFO nova.compute.manager [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Took 0.58 seconds to deallocate network for instance. [ 1695.697202] env[61573]: INFO nova.scheduler.client.report [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted allocations for instance 2d5777f8-a431-43bd-8934-7cc33fd14718 [ 1695.721452] env[61573]: DEBUG oslo_concurrency.lockutils [None req-ba233acd-2b1a-4e4e-ac45-dfb5c713c96f tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 663.933s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.721725] env[61573]: DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 468.479s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.721830] env[61573]: DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "2d5777f8-a431-43bd-8934-7cc33fd14718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.721950] env[61573]: DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.722136] env[61573]: 
DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.724270] env[61573]: INFO nova.compute.manager [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Terminating instance [ 1695.727723] env[61573]: DEBUG nova.compute.manager [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1695.727928] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1695.728521] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfe78ae8-8bef-4626-9857-4c8145a7551a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.738716] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f740a9-079c-4ff7-b479-651f5540289e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.750218] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1695.780205] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d5777f8-a431-43bd-8934-7cc33fd14718 could not be found. [ 1695.780401] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1695.781138] env[61573]: INFO nova.compute.manager [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Took 0.05 seconds to destroy the instance on the hypervisor. 
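The lockutils lines above record both how long a caller waited to acquire a named lock and how long it held it (for example the instance lock acquired after waiting 468.479s and the build lock held for 663.933s). A toy illustration of producing such waited/held figures around a named lock is sketched below; it is built on threading.Lock for readability and is not the oslo.concurrency implementation.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # registry of named locks; a real implementation would guard this too


    @contextmanager
    def timed_lock(name):
        """Acquire a named lock and report wait/held durations, in the spirit
        of the 'acquired ... waited Ns' / 'released ... held Ns' lines above.
        Illustrative sketch only."""
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" released :: held {time.monotonic() - acquired:.3f}s')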
[ 1695.781138] env[61573]: DEBUG oslo.service.loopingcall [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.781261] env[61573]: DEBUG nova.compute.manager [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1695.781311] env[61573]: DEBUG nova.network.neutron [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1695.805939] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.806268] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.808112] env[61573]: INFO nova.compute.claims [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.821529] env[61573]: DEBUG nova.network.neutron [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.841449] env[61573]: INFO nova.compute.manager [-] [instance: 2d5777f8-a431-43bd-8934-7cc33fd14718] Took 0.06 seconds to deallocate network for instance. 
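The claim above succeeds against the provider inventory reported at 1694.994624 and again at 1696.107954 (VCPU total 48 with allocation_ratio 4.0; MEMORY_MB total 196590 with 512 reserved; DISK_GB total 200). A back-of-the-envelope reading of those figures, assuming allocatable capacity is (total - reserved) * allocation_ratio, looks like this:

    # Rough capacity arithmetic for the inventory reported in this log;
    # a sketch of the accounting, not the placement service's own code.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} allocatable units")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200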
[ 1695.974805] env[61573]: DEBUG oslo_concurrency.lockutils [None req-75ee7648-98b2-44ba-b9d1-62f9a74ce9ee tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "2d5777f8-a431-43bd-8934-7cc33fd14718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.253s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.039494] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742872b6-4a55-4f63-8fe8-345d62f21138 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.047021] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25ee124-770c-4834-80d4-41e00928830b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.077389] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e884ba4e-6db0-4d82-8610-fff5023cc54a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.084767] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246fa1f7-e2dd-45d8-9941-e840b18d7aaa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.097779] env[61573]: DEBUG nova.compute.provider_tree [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1696.107954] env[61573]: DEBUG nova.scheduler.client.report [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1696.123742] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.124239] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1696.161583] env[61573]: DEBUG nova.compute.utils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1696.162866] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1696.163104] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1696.172365] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1696.228990] env[61573]: DEBUG nova.policy [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08c59199cd604147a2f0a2cd0dc95773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e08d442d2b554ce6bd9e2cc031cf6735', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1696.238689] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1696.264281] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1696.264538] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1696.264696] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1696.264879] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1696.265061] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1696.265256] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1696.265471] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1696.265634] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1696.265839] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 
tempest-ServersTestJSON-411708961-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1696.266105] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1696.266312] env[61573]: DEBUG nova.virt.hardware [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1696.267254] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efd9af8-5642-4006-98c0-93acfbf4c943 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.275642] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13edce9-e702-4604-9642-3b2bdbac8343 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.586215] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Successfully created port: 4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1697.352106] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Successfully updated port: 4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1697.363068] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.363216] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.363372] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1697.430924] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1697.625495] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Updating instance_info_cache with network_info: [{"id": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "address": "fa:16:3e:b4:69:56", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e9a59c0-d9", "ovs_interfaceid": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.628251] env[61573]: DEBUG nova.compute.manager [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Received event network-vif-plugged-4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1697.628460] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Acquiring lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.628778] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.628964] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.629154] env[61573]: DEBUG nova.compute.manager [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] No waiting events found dispatching network-vif-plugged-4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1697.629321] env[61573]: WARNING nova.compute.manager [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Received unexpected event network-vif-plugged-4e9a59c0-d93d-40f9-8b36-c4a32de994e2 for instance with vm_state building and task_state spawning. [ 1697.629485] env[61573]: DEBUG nova.compute.manager [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Received event network-changed-4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1697.629638] env[61573]: DEBUG nova.compute.manager [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Refreshing instance network info cache due to event network-changed-4e9a59c0-d93d-40f9-8b36-c4a32de994e2. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1697.629803] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Acquiring lock "refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.639502] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.639789] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance network_info: |[{"id": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "address": "fa:16:3e:b4:69:56", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e9a59c0-d9", "ovs_interfaceid": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1697.640078] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Acquired lock 
"refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.640259] env[61573]: DEBUG nova.network.neutron [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Refreshing network info cache for port 4e9a59c0-d93d-40f9-8b36-c4a32de994e2 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1697.641546] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:69:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e9a59c0-d93d-40f9-8b36-c4a32de994e2', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1697.648905] env[61573]: DEBUG oslo.service.loopingcall [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.651975] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1697.652425] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b9a7ceb-d369-4fa3-a475-1cb067d35b1b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.673470] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1697.673470] env[61573]: value = "task-4836887" [ 1697.673470] env[61573]: _type = "Task" [ 1697.673470] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.938291] env[61573]: DEBUG nova.network.neutron [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Updated VIF entry in instance network info cache for port 4e9a59c0-d93d-40f9-8b36-c4a32de994e2. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1697.938653] env[61573]: DEBUG nova.network.neutron [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Updating instance_info_cache with network_info: [{"id": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "address": "fa:16:3e:b4:69:56", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e9a59c0-d9", "ovs_interfaceid": "4e9a59c0-d93d-40f9-8b36-c4a32de994e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.949473] env[61573]: DEBUG oslo_concurrency.lockutils [req-ed6b3359-5490-4610-88b9-46d668de4ce0 req-9a783b47-3760-4110-8b5a-d7b3fe5a6c5b service nova] Releasing lock "refresh_cache-04d29fd0-acd8-407a-8b53-341c78e7b341" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.184209] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836887, 'name': CreateVM_Task, 'duration_secs': 0.375306} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.184419] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1698.185165] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.185366] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.185693] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1698.185957] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-375661a2-8966-464b-b333-e6289cb4c52c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.191094] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 1698.191094] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f32452-fb46-ef22-fa4a-30c3a7a7a076" [ 1698.191094] env[61573]: _type = "Task" [ 1698.191094] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.199923] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f32452-fb46-ef22-fa4a-30c3a7a7a076, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.702224] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.702581] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1698.702581] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.404672] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1704.405117] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 1704.417287] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 1710.416806] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1711.400191] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.403761] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.404535] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1712.404735] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1712.427621] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 
4b512941-2180-44a7-a69d-b54e57856cb0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.427794] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.427930] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428045] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428175] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428296] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428413] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428987] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428987] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428987] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1712.428987] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1712.429454] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.429596] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 1713.412713] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.404498] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.417609] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.417959] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.418242] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.418415] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1714.419628] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbaf98c-c838-44a5-9485-c69281829e1b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.428806] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b0ffe1-1cdd-4959-97a1-9f7db9f92a7b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.445294] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9e21b9-0e3d-4d1b-a9e9-3ef05a2c007b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.452823] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1ef9e98b-72cf-43ca-a99b-7fc2b9f813f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.482552] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180542MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1714.482781] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.483075] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.634238] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b512941-2180-44a7-a69d-b54e57856cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.634395] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.634524] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.634647] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.634769] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.634974] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.635127] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.635243] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.635356] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.635469] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1714.635660] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1714.635810] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '74', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_de5ad5b083124bcab1da2e5a2ae152c9': '1', 'io_workload': '10', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_task_spawning': '2', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1714.652498] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1714.667081] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1714.667278] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1714.678936] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1714.698113] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing trait associations for resource provider 
b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1714.829142] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9117e289-61b8-49f3-8740-4b94be35ddf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.841494] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa375b5f-50e7-4321-9dbe-9b794a0c7aed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.890528] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16168a25-8491-4b98-b86b-78426aae5fba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.898460] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf073cd1-9f9a-4591-92ea-6c8ebf9fe7a4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.912666] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1714.921627] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1714.937678] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1714.937923] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.455s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.937534] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.937948] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.937948] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1716.404642] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.398835] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.403442] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.403987] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.676842] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.699655] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 1744.699655] env[61573]: value = "domain-c8" [ 1744.699655] env[61573]: _type = "ClusterComputeResource" [ 1744.699655] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1744.701193] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c0a86e-5ef6-446f-8e59-dcb6a5e1db12 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.720044] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 10 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1744.720245] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 4b512941-2180-44a7-a69d-b54e57856cb0 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.720406] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.720568] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.720723] env[61573]: DEBUG nova.compute.manager [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 3e075864-6503-4d83-bbd4-f0bec8104e03 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.720878] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid d2350c0d-8c21-4dc2-b0d4-01f98799ab6d {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.721551] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid adb2282f-224e-4a56-abd8-cd91bd0023f0 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.721551] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid d272f481-f590-46e9-9f51-0c7601ff34ce {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.721551] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid c96755a9-1e1c-42ed-a170-35914ef05333 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.721551] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.721845] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 04d29fd0-acd8-407a-8b53-341c78e7b341 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 1744.722033] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "4b512941-2180-44a7-a69d-b54e57856cb0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.722326] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.722535] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.722733] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "3e075864-6503-4d83-bbd4-f0bec8104e03" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.722929] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.723145] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.723349] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "d272f481-f590-46e9-9f51-0c7601ff34ce" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.723551] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "c96755a9-1e1c-42ed-a170-35914ef05333" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.723744] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.723934] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "04d29fd0-acd8-407a-8b53-341c78e7b341" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.866031] env[61573]: WARNING oslo_vmware.rw_handles [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1744.866031] env[61573]: ERROR oslo_vmware.rw_handles [ 1744.866505] env[61573]: DEBUG 
nova.virt.vmwareapi.images [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1744.868396] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1744.868636] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Copying Virtual Disk [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/38da171b-360b-45fe-8c55-a247a07e878c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1744.868920] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a62dae8-1549-46f2-bb74-cc22edff2fd1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.877584] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1744.877584] env[61573]: value = "task-4836888" [ 1744.877584] env[61573]: _type = "Task" [ 1744.877584] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.886890] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.388702] env[61573]: DEBUG oslo_vmware.exceptions [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1745.389028] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.389579] env[61573]: ERROR nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.389579] env[61573]: Faults: ['InvalidArgument'] [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Traceback (most recent call last): [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] yield resources [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self.driver.spawn(context, instance, image_meta, [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self._fetch_image_if_missing(context, vi) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] image_cache(vi, tmp_image_ds_loc) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] vm_util.copy_virtual_disk( [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] session._wait_for_task(vmdk_copy_task) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return self.wait_for_task(task_ref) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return evt.wait() [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] result = hub.switch() [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return self.greenlet.switch() [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self.f(*self.args, **self.kw) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] raise exceptions.translate_fault(task_info.error) [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Faults: ['InvalidArgument'] [ 1745.389579] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] [ 1745.390697] env[61573]: INFO nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Terminating instance [ 1745.391452] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.391655] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1745.391894] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-bf3cbf1f-500f-461a-a4fe-97e50b50a1a5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.394128] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1745.394379] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1745.395122] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90467962-d854-4e7d-add0-96d012e5220b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.402728] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1745.402968] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1205b34-cfc5-4b87-8c72-76d14caaabc4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.405320] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1745.405493] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1745.406455] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8021a7d9-e31b-4dfc-a085-a599c8c6b30d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.411638] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1745.411638] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5239e707-66f9-4798-e9e2-5b077a9e6a97" [ 1745.411638] env[61573]: _type = "Task" [ 1745.411638] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.421501] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5239e707-66f9-4798-e9e2-5b077a9e6a97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.481799] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1745.482136] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1745.482434] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleting the datastore file [datastore2] 4b512941-2180-44a7-a69d-b54e57856cb0 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1745.482821] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdc3fb42-663b-4574-b5c2-077f053712d9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.489494] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for the task: (returnval){ [ 1745.489494] env[61573]: value = "task-4836890" [ 1745.489494] env[61573]: _type = "Task" [ 1745.489494] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.498112] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.922372] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1745.922761] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1745.923872] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5b40368-4bbe-4b93-ae65-25a43d516390 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.936069] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1745.936849] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Fetch image to [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1745.936849] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1745.937484] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c67c0d-af93-461c-a4ac-1c0a7f1eb727 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.946032] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f11f46-4f3f-4678-af9f-6b8a35b571e1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.954656] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b9dcfe-44f0-4d06-97d3-316dbb96b466 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.985058] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fcfd399b-0c0f-4daf-bf6a-0bf925ecf280 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.994365] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cca06e6e-c5ce-4d0d-8690-f9fe2903b16a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.003044] env[61573]: DEBUG oslo_vmware.api [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Task: {'id': task-4836890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069492} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.003044] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1746.003044] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1746.003044] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1746.003293] env[61573]: INFO nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1746.005683] env[61573]: DEBUG nova.compute.claims [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1746.005853] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.006085] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.020641] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1746.077766] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1746.138910] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1746.139125] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1746.247065] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583d8d7c-9ce2-4ba0-a4a9-fef71f2a8d7e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.253856] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b7e6f5-ac35-43a1-8e5d-e2e20502568d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.287754] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8e9fb0-aadb-46eb-9538-dc242e1ed282 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.295981] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51933dd3-b554-412d-baea-612be5e0f730 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.309723] env[61573]: DEBUG nova.compute.provider_tree [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1746.319477] env[61573]: DEBUG nova.scheduler.client.report [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1746.334980] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.329s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.335548] env[61573]: ERROR nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1746.335548] env[61573]: Faults: ['InvalidArgument'] [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Traceback (most recent call last): [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance 
[ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self.driver.spawn(context, instance, image_meta, [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self._fetch_image_if_missing(context, vi) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] image_cache(vi, tmp_image_ds_loc) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] vm_util.copy_virtual_disk( [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] session._wait_for_task(vmdk_copy_task) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return self.wait_for_task(task_ref) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return evt.wait() [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] result = hub.switch() [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] return self.greenlet.switch() [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] self.f(*self.args, **self.kw) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 
4b512941-2180-44a7-a69d-b54e57856cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] raise exceptions.translate_fault(task_info.error) [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Faults: ['InvalidArgument'] [ 1746.335548] env[61573]: ERROR nova.compute.manager [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] [ 1746.336496] env[61573]: DEBUG nova.compute.utils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1746.337806] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Build of instance 4b512941-2180-44a7-a69d-b54e57856cb0 was re-scheduled: A specified parameter was not correct: fileType [ 1746.337806] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1746.338207] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1746.338377] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1746.338547] env[61573]: DEBUG nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1746.338709] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1746.935820] env[61573]: DEBUG nova.network.neutron [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.945989] env[61573]: INFO nova.compute.manager [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Took 0.61 seconds to deallocate network for instance. [ 1747.064832] env[61573]: INFO nova.scheduler.client.report [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Deleted allocations for instance 4b512941-2180-44a7-a69d-b54e57856cb0 [ 1747.094030] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2724c5-4eaf-4873-b14c-19501f9ca5f9 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 685.862s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.094030] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 489.569s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.094266] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Acquiring lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.094467] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.094641] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.097031] env[61573]: INFO nova.compute.manager [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Terminating instance [ 1747.101066] env[61573]: DEBUG nova.compute.manager [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1747.101066] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1747.101320] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0de779b2-482f-4654-b418-c6075a00eb90 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.111171] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f19d43e-2347-4b49-8a72-bc0a42abba90 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.144650] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4b512941-2180-44a7-a69d-b54e57856cb0 could not be found. [ 1747.144872] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1747.145069] env[61573]: INFO nova.compute.manager [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1747.145325] env[61573]: DEBUG oslo.service.loopingcall [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1747.145578] env[61573]: DEBUG nova.compute.manager [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1747.145681] env[61573]: DEBUG nova.network.neutron [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1747.170916] env[61573]: DEBUG nova.network.neutron [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.179851] env[61573]: INFO nova.compute.manager [-] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] Took 0.03 seconds to deallocate network for instance. [ 1747.266118] env[61573]: DEBUG oslo_concurrency.lockutils [None req-0e0341fc-64be-4dc9-b642-30f0a5ed0805 tempest-AttachVolumeShelveTestJSON-1839295289 tempest-AttachVolumeShelveTestJSON-1839295289-project-member] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.266950] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.545s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.267155] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b512941-2180-44a7-a69d-b54e57856cb0] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1747.267335] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "4b512941-2180-44a7-a69d-b54e57856cb0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.007881] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "cef978e5-e61d-4188-a58e-1b5690731c1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.008283] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.019916] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1749.047920] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.048183] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.077416] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.077662] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.079106] env[61573]: INFO nova.compute.claims [None 
req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1749.276819] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee0a559-4d26-40f1-a533-0d3e61a0065c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.284996] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60279b68-d36b-49b7-ab36-3c9558ea4b4a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.315611] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b98f06e-608e-4962-939c-c0de34e731e2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.324057] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56da75e-f8d9-45de-9756-f8950b4b5631 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.337777] env[61573]: DEBUG nova.compute.provider_tree [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.348217] env[61573]: DEBUG nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1749.366450] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.366962] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1749.402178] env[61573]: DEBUG nova.compute.utils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.404455] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1749.404671] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1749.417365] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1749.482548] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1749.508164] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1749.508428] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1749.508586] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1749.508777] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1749.508919] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1749.509102] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1749.509343] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1749.509522] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1749.509717] env[61573]: DEBUG 
nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1749.509889] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1749.510076] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1749.510954] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaec854-f9b7-42e7-a68a-3f00ddadfa03 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.515072] env[61573]: DEBUG nova.policy [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '161622463606455fa04c1bac29a04eb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6cc1ffdc2dc45ad85b0be67c4c8b6c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1749.522254] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4831651c-71f9-410d-bf7e-ab58c17c6615 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.890195] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Successfully created port: c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1750.587904] env[61573]: DEBUG nova.compute.manager [req-b52ce901-a1cb-4701-bac5-c432c53b0548 req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Received event network-vif-plugged-c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1750.588188] env[61573]: DEBUG oslo_concurrency.lockutils [req-b52ce901-a1cb-4701-bac5-c432c53b0548 req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] Acquiring lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.588345] env[61573]: DEBUG oslo_concurrency.lockutils [req-b52ce901-a1cb-4701-bac5-c432c53b0548 
req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.588526] env[61573]: DEBUG oslo_concurrency.lockutils [req-b52ce901-a1cb-4701-bac5-c432c53b0548 req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.588669] env[61573]: DEBUG nova.compute.manager [req-b52ce901-a1cb-4701-bac5-c432c53b0548 req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] No waiting events found dispatching network-vif-plugged-c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1750.588825] env[61573]: WARNING nova.compute.manager [req-b52ce901-a1cb-4701-bac5-c432c53b0548 req-100687b7-f0c0-41ed-8172-eeb118361224 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Received unexpected event network-vif-plugged-c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 for instance with vm_state building and task_state spawning. [ 1750.633934] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Successfully updated port: c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1750.645623] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.645787] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.645939] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1750.716871] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1750.940250] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Updating instance_info_cache with network_info: [{"id": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "address": "fa:16:3e:26:fa:00", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9a73e8b-d4", "ovs_interfaceid": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.957752] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.958068] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance network_info: |[{"id": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "address": "fa:16:3e:26:fa:00", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9a73e8b-d4", "ovs_interfaceid": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1750.958525] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:fa:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9a73e8b-d444-4fbe-b6f1-7de040bb5a84', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1750.966204] env[61573]: DEBUG oslo.service.loopingcall [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.966797] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1750.967048] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87966727-d6b1-4f58-bf6c-164e80f7f399 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.988202] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1750.988202] env[61573]: value = "task-4836891" [ 1750.988202] env[61573]: _type = "Task" [ 1750.988202] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.998950] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836891, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.498960] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836891, 'name': CreateVM_Task, 'duration_secs': 0.300045} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.499183] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1751.499787] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.499960] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.500314] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1751.500571] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5e25d0d-c166-4a79-9bb2-ea78ff60c018 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.505708] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1751.505708] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52990e91-9724-effd-0f40-f92caa71132b" [ 1751.505708] env[61573]: _type = "Task" [ 1751.505708] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.513612] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52990e91-9724-effd-0f40-f92caa71132b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.017201] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.017582] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.017582] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.623278] env[61573]: DEBUG nova.compute.manager [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Received event network-changed-c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1752.623467] env[61573]: DEBUG nova.compute.manager [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Refreshing instance network info cache due to event network-changed-c9a73e8b-d444-4fbe-b6f1-7de040bb5a84. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1752.623723] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] Acquiring lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.623822] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] Acquired lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.623990] env[61573]: DEBUG nova.network.neutron [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Refreshing network info cache for port c9a73e8b-d444-4fbe-b6f1-7de040bb5a84 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1752.937918] env[61573]: DEBUG nova.network.neutron [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Updated VIF entry in instance network info cache for port c9a73e8b-d444-4fbe-b6f1-7de040bb5a84. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1752.938292] env[61573]: DEBUG nova.network.neutron [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Updating instance_info_cache with network_info: [{"id": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "address": "fa:16:3e:26:fa:00", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9a73e8b-d4", "ovs_interfaceid": "c9a73e8b-d444-4fbe-b6f1-7de040bb5a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.949029] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5fbccf2-6500-4e20-be9b-e5cb4c69457c req-8e958437-285c-41ee-a3cd-d77b62ecdd35 service nova] Releasing lock "refresh_cache-cef978e5-e61d-4188-a58e-1b5690731c1b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.068380] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.068732] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.450691] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.404022] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.404022] env[61573]: DEBUG 
nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1772.404022] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1772.428221] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.428408] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.428676] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.428908] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429088] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429283] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429446] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429594] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429741] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.429886] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1772.430044] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1773.404266] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.623210] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "2dc713f6-b67b-4360-a751-29b7218e130a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.623210] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.404734] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.417268] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.417495] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.417663] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.417820] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1774.418941] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2563288-4b92-4e2d-a7f9-6b09a74b3b81 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.428364] env[61573]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dbbe23-efe4-40fe-8052-4074689c80df {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.443588] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a13c0e5-6fc2-4066-aab4-5ec8a3d67013 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.450797] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b19ea9-8798-4608-a77a-6022f33b5315 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.484107] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180561MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1774.485083] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.485083] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.573229] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.573389] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.573543] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.573802] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.573802] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.573882] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.574277] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.574277] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.574277] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.574407] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1774.588628] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1774.600180] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1774.612810] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1774.613074] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1774.613239] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '75', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '3', 'io_workload': '10', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_task_spawning': '3', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1774.814918] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ff1758-d965-4cf9-886a-ee78c7aa5a85 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.826520] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28ce504-e7ca-4933-8e1f-70493d8c8a57 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.879066] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c5b22b-92a6-41bb-aaf8-65294c25a257 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.891069] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac4d356-4a05-464b-9277-77d5d7e5b5db {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.913655] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.922960] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1774.939758] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1774.939952] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.455s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.940594] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.940594] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1776.404549] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.403907] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.403907] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.957080] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.957430] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.399735] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1791.431586] env[61573]: WARNING oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1791.431586] env[61573]: ERROR oslo_vmware.rw_handles [ 1791.432301] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1791.434045] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1791.434311] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Copying Virtual Disk [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/33b84d93-cff9-444e-8f14-2253a328b7af/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1791.434647] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0545f89b-3bd0-480e-9c7b-bd145ed3552a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.443874] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 
tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1791.443874] env[61573]: value = "task-4836892" [ 1791.443874] env[61573]: _type = "Task" [ 1791.443874] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.452858] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836892, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.955023] env[61573]: DEBUG oslo_vmware.exceptions [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1791.955344] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.955904] env[61573]: ERROR nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1791.955904] env[61573]: Faults: ['InvalidArgument'] [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Traceback (most recent call last): [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] yield resources [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self.driver.spawn(context, instance, image_meta, [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self._fetch_image_if_missing(context, vi) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] image_cache(vi, tmp_image_ds_loc) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] vm_util.copy_virtual_disk( [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] session._wait_for_task(vmdk_copy_task) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return self.wait_for_task(task_ref) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return evt.wait() [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] result = hub.switch() [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return self.greenlet.switch() [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self.f(*self.args, **self.kw) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] raise exceptions.translate_fault(task_info.error) [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Faults: ['InvalidArgument'] [ 1791.955904] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] [ 1791.957115] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Terminating instance [ 1791.957843] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.958066] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1791.958329] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d86e1e25-1b10-47bd-b773-407496c4fd68 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.960764] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1791.960967] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1791.961756] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6cd12c-9d76-4d69-93d9-b859c08cf533 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.969194] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1791.969448] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be93035a-ce21-415b-b3ac-f1fa337fb5b2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.971736] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1791.971908] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1791.972901] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a540efbc-930b-4ad6-8f4a-a7562d7234c0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.978332] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1791.978332] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5207e602-8b54-04b1-9055-ba6cacc8eff7" [ 1791.978332] env[61573]: _type = "Task" [ 1791.978332] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.986700] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5207e602-8b54-04b1-9055-ba6cacc8eff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.039068] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1792.039319] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1792.039511] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleting the datastore file [datastore2] a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1792.039795] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77cd7886-97ce-4650-842e-34e9fafcabac {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.046656] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1792.046656] env[61573]: value = "task-4836894" [ 1792.046656] env[61573]: _type = "Task" [ 1792.046656] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.054923] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.489292] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1792.489698] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.489787] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3071cde-9ee6-43b7-b2f1-b8518eef4272 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.503097] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.503097] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Fetch image to [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1792.503467] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1792.504118] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a51732-bea3-474c-a290-9bd21270e97d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.512153] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffe8abb-5a57-4344-a3c0-7a3ee23fe781 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.522729] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3637018-46a2-435c-b66d-dd224eacb28b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.558324] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3c8c951d-1a40-49c7-8f48-c1599efc28e6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.569052] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23ffa54c-4542-43c0-8964-6258860e19e7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.569991] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078695} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.570271] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1792.570454] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1792.570629] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1792.571106] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Took 0.61 seconds to destroy the instance on the hypervisor. 
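
The DeleteDatastoreFile_Task sequence above follows the same pattern as every vCenter call in this log: the driver invokes a method through oslo.vmware, gets back a task reference, and wait_for_task polls it ("progress is 0%" ... "completed successfully. duration_secs: 0.078695") until it either succeeds or a fault is translated and raised, as the earlier CopyVirtualDisk_Task traceback shows. A minimal, self-contained sketch of that polling loop (not the actual oslo.vmware implementation; FakeTaskInfo and the exception type are stand-ins for illustration):

    import time

    class FakeTaskInfo:
        """Stand-in for a vSphere TaskInfo; state moves through
        queued/running/success/error."""
        def __init__(self, states):
            self._states = iter(states)
            self.state = None
            self.error = None

        def poll(self):
            self.state = next(self._states)
            return self

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task reaches a terminal state, mirroring the
        'progress is 0%' ... 'completed successfully' lines in the log."""
        while True:
            info = task.poll()
            if info.state == "success":
                return info
            if info.state == "error":
                # oslo.vmware translates task_info.error into a fault-specific
                # exception (e.g. InvalidArgument) here; a plain exception in
                # this sketch.
                raise RuntimeError(f"task failed: {info.error}")
            print(f"task state: {info.state}")
            time.sleep(poll_interval)

    # A task that reports 'running' twice and then succeeds, roughly like
    # task-4836894 (DeleteDatastoreFile_Task) above.
    wait_for_task(FakeTaskInfo(["running", "running", "success"]), poll_interval=0.01)
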
[ 1792.573026] env[61573]: DEBUG nova.compute.claims [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1792.573217] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.573438] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.598821] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1792.674865] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1792.735088] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1792.735088] env[61573]: DEBUG oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1792.892889] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594867ca-76b3-45cf-9565-d9bec70f70e9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.902030] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95255b7f-bf0e-4076-bb92-4e03c08005e3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.932067] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9c34a1-3fc1-426d-ad94-6ed596753d1c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.939593] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34793fa-4be2-42ad-b064-781d1eb6c277 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.953715] env[61573]: DEBUG nova.compute.provider_tree [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.964983] env[61573]: DEBUG nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1792.981641] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.408s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.982083] env[61573]: ERROR nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1792.982083] env[61573]: Faults: ['InvalidArgument'] [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Traceback (most recent call last): [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1792.982083] env[61573]: 
ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self.driver.spawn(context, instance, image_meta, [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self._fetch_image_if_missing(context, vi) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] image_cache(vi, tmp_image_ds_loc) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] vm_util.copy_virtual_disk( [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] session._wait_for_task(vmdk_copy_task) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return self.wait_for_task(task_ref) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return evt.wait() [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] result = hub.switch() [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] return self.greenlet.switch() [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] self.f(*self.args, **self.kw) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] raise exceptions.translate_fault(task_info.error) [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Faults: ['InvalidArgument'] [ 1792.982083] env[61573]: ERROR nova.compute.manager [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] [ 1792.983029] env[61573]: DEBUG nova.compute.utils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1792.984451] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Build of instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 was re-scheduled: A specified parameter was not correct: fileType [ 1792.984451] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1792.984828] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1792.985010] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1792.985215] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1792.985373] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1793.329712] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.346798] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Took 0.36 seconds to deallocate network for instance. [ 1793.461887] env[61573]: INFO nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted allocations for instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 [ 1793.487038] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.850s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.488715] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 438.856s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.489271] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.489698] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.489991] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.492549] env[61573]: INFO nova.compute.manager [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Terminating instance [ 1793.495052] env[61573]: DEBUG nova.compute.manager [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1793.495369] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1793.496285] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6a6783c-d335-49af-b54e-d8e5f58eba69 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.506733] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75b00c9-b8cd-435c-854d-8c6c49ed0030 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.518408] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1793.542043] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7ce8b7b-e8a3-4ebd-88aa-b46794f43626 could not be found. [ 1793.542205] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1793.542371] env[61573]: INFO nova.compute.manager [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Took 0.05 seconds to destroy the instance on the hypervisor. 
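
The Acquiring/acquired/released lines that bracket every critical section here (the per-instance lock "a7ce8b7b-..." held 634.850s, the "...-events" lock held 0.000s, the "compute_resources" lock, and so on) come from oslo.concurrency's lock helpers, which log how long a caller waited for a named lock and how long it was held. A rough sketch of the same pattern, assuming only the documented lockutils.lock() context manager; the function name, timing prints, and UUID are illustrative, not Nova's actual code:

    import time
    from oslo_concurrency import lockutils

    def locked_do_build_and_run_instance(instance_uuid):
        """Serialize work per instance UUID behind a named lock.

        lockutils.lock() returns a process-local (or external, if
        external=True) named lock; the waited/held figures in the log are
        simply timestamps taken around a block like this one.
        """
        waited_from = time.monotonic()
        with lockutils.lock(instance_uuid):
            held_from = time.monotonic()
            print(f'Lock "{instance_uuid}" acquired :: waited '
                  f'{held_from - waited_from:.3f}s')
            # ... build and run the instance ...
        print(f'Lock "{instance_uuid}" released :: held '
              f'{time.monotonic() - held_from:.3f}s')

    locked_do_build_and_run_instance("2dc713f6-b67b-4360-a751-29b7218e130a")
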
[ 1793.542622] env[61573]: DEBUG oslo.service.loopingcall [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.542855] env[61573]: DEBUG nova.compute.manager [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1793.542948] env[61573]: DEBUG nova.network.neutron [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1793.571695] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.571695] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.572868] env[61573]: INFO nova.compute.claims [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1793.575720] env[61573]: DEBUG nova.network.neutron [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.583477] env[61573]: INFO nova.compute.manager [-] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] Took 0.04 seconds to deallocate network for instance. [ 1793.698761] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1267e5f6-f17e-40ae-b755-0e235afd4377 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.699641] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 48.977s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.702863] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: a7ce8b7b-e8a3-4ebd-88aa-b46794f43626] During sync_power_state the instance has a pending task (deleting). Skip. 
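
The "Inventory has not changed for provider b1eff98b-..." entries repeat the same inventory dict on every audit cycle. The capacity placement schedules against is derived from that dict as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a quick check with the values reported in this log (the formula is standard placement semantics, not something the log states explicitly):

    # Inventory as reported for provider b1eff98b-2b30-4574-a87d-d151235a2dba.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 96},
    }

    for rc, inv in inventory.items():
        schedulable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {schedulable:.0f} schedulable, at most {inv['max_unit']} per allocation")

    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200 -- which is why ten instances
    # at 1 VCPU / 128 MB / 1 GB each leave the node with ample headroom.
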
[ 1793.702863] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "a7ce8b7b-e8a3-4ebd-88aa-b46794f43626" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.834461] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0136d453-80da-4f71-992a-1104918fbc45 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.842849] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b04672-4657-4b2d-b774-58935ad2d1ab {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.877174] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4a39a2-aaca-4ed5-8abd-9e2b1e6ff7d8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.885635] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766d0800-6de3-4b92-827b-6cfa530d637f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.899560] env[61573]: DEBUG nova.compute.provider_tree [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.910237] env[61573]: DEBUG nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.928628] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.929339] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1793.992857] env[61573]: DEBUG nova.compute.utils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1793.994308] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1793.994501] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1794.004445] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1794.072620] env[61573]: DEBUG nova.policy [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '161622463606455fa04c1bac29a04eb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6cc1ffdc2dc45ad85b0be67c4c8b6c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1794.075728] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1794.101333] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1794.101640] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1794.101800] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1794.101980] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1794.102141] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1794.102290] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1794.102498] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1794.102666] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1794.102925] env[61573]: DEBUG 
nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1794.103111] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1794.103290] env[61573]: DEBUG nova.virt.hardware [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1794.104165] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48cbd52-139c-421a-abee-4e48e13d9568 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.112474] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d9e620-3ead-47de-be24-1417466db71e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.599938] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Successfully created port: 8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1795.362567] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Successfully updated port: 8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.377772] env[61573]: DEBUG nova.compute.manager [req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Received event network-vif-plugged-8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1795.377772] env[61573]: DEBUG oslo_concurrency.lockutils [req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] Acquiring lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.377772] env[61573]: DEBUG oslo_concurrency.lockutils [req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.377772] env[61573]: DEBUG oslo_concurrency.lockutils 
[req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.377772] env[61573]: DEBUG nova.compute.manager [req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] No waiting events found dispatching network-vif-plugged-8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.377772] env[61573]: WARNING nova.compute.manager [req-61551a30-44ca-465a-a614-f7d50426ee81 req-533244d6-947d-4bb8-9ed3-91fc2ab93bf8 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Received unexpected event network-vif-plugged-8c7cb04f-f479-42c4-b596-905bd825b74c for instance with vm_state building and task_state spawning. [ 1795.380174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.380313] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.380453] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1795.423605] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1795.631505] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Updating instance_info_cache with network_info: [{"id": "8c7cb04f-f479-42c4-b596-905bd825b74c", "address": "fa:16:3e:c2:3e:6c", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7cb04f-f4", "ovs_interfaceid": "8c7cb04f-f479-42c4-b596-905bd825b74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.644974] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.645253] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance network_info: |[{"id": "8c7cb04f-f479-42c4-b596-905bd825b74c", "address": "fa:16:3e:c2:3e:6c", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7cb04f-f4", "ovs_interfaceid": "8c7cb04f-f479-42c4-b596-905bd825b74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1795.645658] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:3e:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c7cb04f-f479-42c4-b596-905bd825b74c', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1795.653282] env[61573]: DEBUG oslo.service.loopingcall [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1795.653801] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1795.654036] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7a597b0-10c3-4ac0-a9d1-f510aa0901fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.675154] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1795.675154] env[61573]: value = "task-4836895" [ 1795.675154] env[61573]: _type = "Task" [ 1795.675154] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.683757] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836895, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.186274] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836895, 'name': CreateVM_Task, 'duration_secs': 0.311089} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.186477] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1796.194815] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.195010] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.195336] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1796.195591] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c72df3a-82a2-45b1-a652-f15be4e1b356 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.201468] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1796.201468] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524064a1-3f64-108a-febb-19d9392fd229" [ 1796.201468] env[61573]: _type = "Task" [ 1796.201468] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.210221] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]524064a1-3f64-108a-febb-19d9392fd229, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.712529] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.712917] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1796.713013] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.456639] env[61573]: DEBUG nova.compute.manager [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Received event network-changed-8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1797.456837] env[61573]: DEBUG nova.compute.manager [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Refreshing instance network info cache due to event network-changed-8c7cb04f-f479-42c4-b596-905bd825b74c. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1797.457078] env[61573]: DEBUG oslo_concurrency.lockutils [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] Acquiring lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.457214] env[61573]: DEBUG oslo_concurrency.lockutils [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] Acquired lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.457366] env[61573]: DEBUG nova.network.neutron [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Refreshing network info cache for port 8c7cb04f-f479-42c4-b596-905bd825b74c {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1797.738342] env[61573]: DEBUG nova.network.neutron [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Updated VIF entry in instance network info cache for port 8c7cb04f-f479-42c4-b596-905bd825b74c. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1797.738696] env[61573]: DEBUG nova.network.neutron [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Updating instance_info_cache with network_info: [{"id": "8c7cb04f-f479-42c4-b596-905bd825b74c", "address": "fa:16:3e:c2:3e:6c", "network": {"id": "ded2505d-9d5b-43aa-bb27-cbc71dd6415f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1152814111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6cc1ffdc2dc45ad85b0be67c4c8b6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7cb04f-f4", "ovs_interfaceid": "8c7cb04f-f479-42c4-b596-905bd825b74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.748661] env[61573]: DEBUG oslo_concurrency.lockutils [req-2a1efcb6-846b-4109-a356-5492671de236 req-b6282373-edaf-41a7-ab99-21a72268e590 service nova] Releasing lock "refresh_cache-4491bc7f-5014-4631-a7ec-486928ef0cf9" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.185110] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.814815] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "c3053874-e935-40c5-ac81-268e759611f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.815225] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.731474] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock 
"04d29fd0-acd8-407a-8b53-341c78e7b341" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.399252] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.423164] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.405843] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.405843] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1834.405843] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1834.425423] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.425605] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.425705] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.425827] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.425955] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426097] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426252] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426379] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426499] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426618] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1834.426738] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1834.427239] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.427427] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.438573] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.438775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.438938] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.439097] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1834.440197] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d587fd-9afb-464e-8510-7c7de358ec7a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.449066] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d47a5c8-9423-4d19-b41a-5f101917a7f9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.464392] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f86c04d-ab42-4bdc-b3b2-db9c1b25e718 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.471184] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16f4631-7e71-488a-8124-8a26d1b0c944 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.499863] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180533MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1834.500073] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.500248] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.580047] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.580348] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.580571] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.580757] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.580896] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.581028] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.581152] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.581269] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.581384] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.581498] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1834.593382] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1834.604406] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1834.615715] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1834.626918] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1834.628029] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1834.628029] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '76', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '3', 'io_workload': '10', 'num_proj_5700bfb489004c86ac0b8ec509bd3758': '1', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '2'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1834.800211] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514163ef-196b-4aab-b5fa-22f74d0955de {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.807680] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32052693-3754-4ece-8cfd-21b39631e937 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.837717] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d0193f6e-4793-4cc0-a175-313fc579fb3e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.845063] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80285b2-dab1-4aa2-a734-567706ef7440 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.857875] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.866650] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1834.883626] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1834.883816] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.384s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.860682] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.860974] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1837.404701] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.405084] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.404701] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.399395] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.448572] env[61573]: WARNING oslo_vmware.rw_handles [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1841.448572] env[61573]: ERROR oslo_vmware.rw_handles [ 1841.449291] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1841.451035] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 
e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1841.451288] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Copying Virtual Disk [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/a86bfe39-c24e-4cf4-8ab3-1e103cde75c4/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1841.451569] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92b9eaac-2999-4098-b38f-a1b62a95ee16 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.460477] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1841.460477] env[61573]: value = "task-4836896" [ 1841.460477] env[61573]: _type = "Task" [ 1841.460477] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.468958] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836896, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.971104] env[61573]: DEBUG oslo_vmware.exceptions [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1841.971416] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.972051] env[61573]: ERROR nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1841.972051] env[61573]: Faults: ['InvalidArgument'] [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Traceback (most recent call last): [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] yield resources [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self.driver.spawn(context, instance, image_meta, [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self._fetch_image_if_missing(context, vi) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] image_cache(vi, tmp_image_ds_loc) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] vm_util.copy_virtual_disk( [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] session._wait_for_task(vmdk_copy_task) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return self.wait_for_task(task_ref) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return evt.wait() [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] result = hub.switch() [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return self.greenlet.switch() [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self.f(*self.args, **self.kw) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] raise exceptions.translate_fault(task_info.error) [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Faults: ['InvalidArgument'] [ 1841.972051] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] [ 1841.973352] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Terminating instance [ 1841.973971] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.974199] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1841.974446] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2b1851ec-f36b-497a-b375-1c5826d2a070 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.976826] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1841.977031] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1841.977754] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3fd5a1-ea42-49ef-8416-1a6ca53d05a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.984604] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1841.984835] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4f25fe0-b930-4c3f-a50b-87b7937048ea {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.986941] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1841.987129] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1841.988074] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41b2e8e8-f006-4034-8f67-7abe787bd824 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.993050] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for the task: (returnval){ [ 1841.993050] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52136af7-b433-aa35-18fc-9ad5a02cce7d" [ 1841.993050] env[61573]: _type = "Task" [ 1841.993050] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.000463] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52136af7-b433-aa35-18fc-9ad5a02cce7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.063009] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1842.063322] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1842.063596] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleting the datastore file [datastore2] e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1842.063906] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67dfde85-c776-47b7-acfc-ddd64e3c4781 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.071438] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 1842.071438] env[61573]: value = "task-4836898" [ 1842.071438] env[61573]: _type = "Task" [ 1842.071438] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.079562] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836898, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.504363] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1842.504756] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Creating directory with path [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1842.504905] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a08692db-e4db-4210-ba28-430ea784a065 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.518039] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Created directory with path [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1842.518039] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Fetch image to [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1842.518375] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1842.518915] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee99810-deec-450f-be91-215f1b2f3eb4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.529781] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cfd51d-13cb-430c-9182-28a4601c043e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.538714] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716ee3c2-4dc6-47c2-a410-844cec16173d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.569966] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e811362f-9392-4373-997d-5bf294117298 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.581695] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7fce42fc-171e-4568-8266-32105bb7d581 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.583422] env[61573]: DEBUG oslo_vmware.api [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067489} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.583660] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1842.583839] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1842.584019] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1842.584197] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1842.586448] env[61573]: DEBUG nova.compute.claims [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1842.586631] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.586854] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.606163] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1842.792623] env[61573]: DEBUG oslo_vmware.rw_handles [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1842.851121] env[61573]: DEBUG oslo_vmware.rw_handles [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1842.851326] env[61573]: DEBUG oslo_vmware.rw_handles [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1842.873899] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d9995-0bc0-4197-8401-870078d04cd7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.881972] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a500bd03-0771-4de4-bc84-78e5b0d14663 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.912587] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17a6d6-75e8-4dd6-b621-190eb05ba05f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.919848] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f075ed-12e4-4085-b913-21c9380f99a5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.933141] env[61573]: DEBUG nova.compute.provider_tree [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.941752] env[61573]: DEBUG nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1842.955869] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.369s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.956461] env[61573]: ERROR nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1842.956461] env[61573]: Faults: ['InvalidArgument'] [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Traceback (most recent call last): [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1842.956461] env[61573]: 
ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self.driver.spawn(context, instance, image_meta, [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self._fetch_image_if_missing(context, vi) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] image_cache(vi, tmp_image_ds_loc) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] vm_util.copy_virtual_disk( [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] session._wait_for_task(vmdk_copy_task) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return self.wait_for_task(task_ref) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return evt.wait() [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] result = hub.switch() [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] return self.greenlet.switch() [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] self.f(*self.args, **self.kw) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] raise exceptions.translate_fault(task_info.error) [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Faults: ['InvalidArgument'] [ 1842.956461] env[61573]: ERROR nova.compute.manager [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] [ 1842.957488] env[61573]: DEBUG nova.compute.utils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1842.958934] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Build of instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef was re-scheduled: A specified parameter was not correct: fileType [ 1842.958934] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1842.959319] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1842.959489] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1842.959660] env[61573]: DEBUG nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1842.959822] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1843.333955] env[61573]: DEBUG nova.network.neutron [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.345675] env[61573]: INFO nova.compute.manager [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Took 0.39 seconds to deallocate network for instance. [ 1843.446786] env[61573]: INFO nova.scheduler.client.report [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted allocations for instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef [ 1843.468970] env[61573]: DEBUG oslo_concurrency.lockutils [None req-b151755e-0337-4f01-a1c1-fbcf1c57b852 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 684.786s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.470291] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 488.910s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.470673] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.470914] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.471107] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.473381] env[61573]: INFO nova.compute.manager [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Terminating instance [ 1843.475295] env[61573]: DEBUG nova.compute.manager [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1843.475515] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1843.476089] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60a56df9-e241-4d0f-8866-eccde5a15c28 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.482567] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1843.489740] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a43373-9c76-4a37-b5d8-dce2ca706743 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.525740] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef could not be found. [ 1843.525740] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1843.526181] env[61573]: INFO nova.compute.manager [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1843.526339] env[61573]: DEBUG oslo.service.loopingcall [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.529014] env[61573]: DEBUG nova.compute.manager [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1843.529124] env[61573]: DEBUG nova.network.neutron [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1843.544067] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.544335] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.545853] env[61573]: INFO nova.compute.claims [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1843.555981] env[61573]: DEBUG nova.network.neutron [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.569026] env[61573]: INFO nova.compute.manager [-] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] Took 0.04 seconds to deallocate network for instance. 
[ 1843.655024] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1d66d22c-6549-415b-b6bc-6262d2b0fe88 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.655889] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 98.933s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.656096] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef] During sync_power_state the instance has a pending task (deleting). Skip. [ 1843.656271] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "e6b26294-bda0-4f4e-8e12-ac96d3c9e4ef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.772046] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5fee46-2427-4b0d-9211-f37060be7d71 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.780216] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c14e52d-7e8b-4cea-b8dd-e4500d9d414a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.811055] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60da63d-a77e-43b7-8b0f-c0cfff7ca583 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.818104] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f40d0f-f3d3-4369-a1c0-6533629c8d74 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.831453] env[61573]: DEBUG nova.compute.provider_tree [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.841093] env[61573]: DEBUG nova.scheduler.client.report [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1843.854873] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.855406] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1843.926401] env[61573]: DEBUG nova.compute.utils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1843.927637] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1843.927809] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1843.938163] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1843.996771] env[61573]: DEBUG nova.policy [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '433b7a5836584a408afa8c55bd2e6625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad53ba52560d475e9c8a48903da448ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1844.014886] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1844.042308] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1844.042562] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1844.042970] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1844.042970] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1844.043102] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Image pref 0:0:0 {{(pid=61573) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1844.043224] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1844.043430] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1844.043591] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1844.043758] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1844.043925] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1844.044124] env[61573]: DEBUG nova.virt.hardware [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1844.044970] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3c9cff-d3e1-47ed-970b-8a981fcee444 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.053598] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c40d100-11c0-4845-87e5-28bebc70e8aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.372189] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Successfully created port: 29649028-bd68-4ce9-b557-884b50150e06 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1845.289192] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Successfully updated port: 
29649028-bd68-4ce9-b557-884b50150e06 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1845.300243] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.300393] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquired lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.301031] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1845.345285] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1845.409193] env[61573]: DEBUG nova.compute.manager [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Received event network-vif-plugged-29649028-bd68-4ce9-b557-884b50150e06 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1845.409431] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Acquiring lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.409641] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.409878] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.411792] env[61573]: DEBUG nova.compute.manager [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] No waiting events found dispatching network-vif-plugged-29649028-bd68-4ce9-b557-884b50150e06 
{{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1845.412105] env[61573]: WARNING nova.compute.manager [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Received unexpected event network-vif-plugged-29649028-bd68-4ce9-b557-884b50150e06 for instance with vm_state building and task_state spawning. [ 1845.412294] env[61573]: DEBUG nova.compute.manager [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Received event network-changed-29649028-bd68-4ce9-b557-884b50150e06 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1845.412453] env[61573]: DEBUG nova.compute.manager [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Refreshing instance network info cache due to event network-changed-29649028-bd68-4ce9-b557-884b50150e06. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1845.412628] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Acquiring lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.591173] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Updating instance_info_cache with network_info: [{"id": "29649028-bd68-4ce9-b557-884b50150e06", "address": "fa:16:3e:04:ba:70", "network": {"id": "cf6b7909-bead-44d7-9fd9-b43b1583faad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1547878504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad53ba52560d475e9c8a48903da448ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29649028-bd", "ovs_interfaceid": "29649028-bd68-4ce9-b557-884b50150e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.605460] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Releasing lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.605765] env[61573]: DEBUG nova.compute.manager [None 
req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance network_info: |[{"id": "29649028-bd68-4ce9-b557-884b50150e06", "address": "fa:16:3e:04:ba:70", "network": {"id": "cf6b7909-bead-44d7-9fd9-b43b1583faad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1547878504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad53ba52560d475e9c8a48903da448ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29649028-bd", "ovs_interfaceid": "29649028-bd68-4ce9-b557-884b50150e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1845.606077] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Acquired lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.606262] env[61573]: DEBUG nova.network.neutron [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Refreshing network info cache for port 29649028-bd68-4ce9-b557-884b50150e06 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1845.607324] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:ba:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29649028-bd68-4ce9-b557-884b50150e06', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1845.615263] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Creating folder: Project (ad53ba52560d475e9c8a48903da448ec). Parent ref: group-v942801. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1845.618113] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-327739c6-fb92-4938-a52b-273760140369 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.629617] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Created folder: Project (ad53ba52560d475e9c8a48903da448ec) in parent group-v942801. [ 1845.629797] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Creating folder: Instances. Parent ref: group-v942903. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1845.630039] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5be3b2d-c72b-4fc2-bafa-6d9710347106 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.639507] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Created folder: Instances in parent group-v942903. [ 1845.639777] env[61573]: DEBUG oslo.service.loopingcall [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.639996] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1845.640235] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69bf5a9b-31e9-4b44-95cb-a97c80db3e8b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.660744] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1845.660744] env[61573]: value = "task-4836901" [ 1845.660744] env[61573]: _type = "Task" [ 1845.660744] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.668679] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836901, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.951697] env[61573]: DEBUG nova.network.neutron [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Updated VIF entry in instance network info cache for port 29649028-bd68-4ce9-b557-884b50150e06. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1845.952178] env[61573]: DEBUG nova.network.neutron [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Updating instance_info_cache with network_info: [{"id": "29649028-bd68-4ce9-b557-884b50150e06", "address": "fa:16:3e:04:ba:70", "network": {"id": "cf6b7909-bead-44d7-9fd9-b43b1583faad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1547878504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad53ba52560d475e9c8a48903da448ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29649028-bd", "ovs_interfaceid": "29649028-bd68-4ce9-b557-884b50150e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.964445] env[61573]: DEBUG oslo_concurrency.lockutils [req-3c97cf8a-3748-4218-b518-7764ab87394b req-62697cb6-8c27-40c8-a527-db39d69803be service nova] Releasing lock "refresh_cache-3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.171043] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836901, 'name': CreateVM_Task, 'duration_secs': 0.311341} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.171043] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1846.171574] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.171747] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.172136] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1846.172401] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d34fbd4f-65a8-4028-9ae7-8bf5f6298b57 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.177623] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for the task: (returnval){ [ 1846.177623] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]529f0184-4ea5-f00c-15f1-ac2005537dde" [ 1846.177623] env[61573]: _type = "Task" [ 1846.177623] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.185331] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]529f0184-4ea5-f00c-15f1-ac2005537dde, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.689973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.690434] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1846.690647] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.463030] env[61573]: WARNING oslo_vmware.rw_handles [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1891.463030] env[61573]: ERROR oslo_vmware.rw_handles [ 1891.463845] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1891.465384] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None 
req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1891.465621] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Copying Virtual Disk [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/81757752-fad4-4a9e-bb98-2a7e12a1c62d/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1891.465899] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9126bb8-141a-4664-84fe-51d59af20172 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.475215] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for the task: (returnval){ [ 1891.475215] env[61573]: value = "task-4836902" [ 1891.475215] env[61573]: _type = "Task" [ 1891.475215] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.484271] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Task: {'id': task-4836902, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.985770] env[61573]: DEBUG oslo_vmware.exceptions [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1891.986096] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.986668] env[61573]: ERROR nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.986668] env[61573]: Faults: ['InvalidArgument'] [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Traceback (most recent call last): [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] yield resources [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self.driver.spawn(context, instance, image_meta, [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self._fetch_image_if_missing(context, vi) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] image_cache(vi, tmp_image_ds_loc) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] vm_util.copy_virtual_disk( [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] session._wait_for_task(vmdk_copy_task) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return self.wait_for_task(task_ref) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return evt.wait() [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] result = hub.switch() [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return self.greenlet.switch() [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self.f(*self.args, **self.kw) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] raise exceptions.translate_fault(task_info.error) [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Faults: ['InvalidArgument'] [ 1891.986668] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] [ 1891.988014] env[61573]: INFO nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Terminating instance [ 1891.988594] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.988802] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.989069] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d08d0b4f-abfe-4afb-a126-faca23825c3c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.992543] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1891.992736] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1891.993556] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57389537-ed2d-4550-9f53-d7f91f6b7ce7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.001472] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1892.001730] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09447f99-a867-46da-a615-b37d7daf5350 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.004734] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.004913] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1892.005638] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0684eeb-c3fc-4283-b50b-5cfeb500df65 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.011666] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for the task: (returnval){ [ 1892.011666] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d2a615-43f9-6abd-e2e8-79c129e05d26" [ 1892.011666] env[61573]: _type = "Task" [ 1892.011666] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.020098] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d2a615-43f9-6abd-e2e8-79c129e05d26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.076797] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1892.077097] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1892.077240] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Deleting the datastore file [datastore2] 3e075864-6503-4d83-bbd4-f0bec8104e03 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1892.077501] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-100cc4d4-6123-4bb9-89aa-a84c52e37b30 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.084603] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for the task: (returnval){ [ 1892.084603] env[61573]: value = "task-4836904" [ 1892.084603] env[61573]: _type = "Task" [ 1892.084603] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.093145] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Task: {'id': task-4836904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.522879] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1892.523298] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Creating directory with path [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1892.523515] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c055452-52f4-4196-8bbe-f2f339059356 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.535428] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Created directory with path [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.535654] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Fetch image to [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1892.535819] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1892.536626] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9e39bf-fb04-4e8e-9d73-e1f7325c3d89 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.543834] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da187b8-1e63-4b42-95a0-94413bc28de1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.553172] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fd33fb-5659-4dcc-bfed-8f315bbe7ff6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.589234] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c243121-2fbb-4994-85b7-1310291ccacd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.598550] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6ca3e59f-0e94-462c-96a6-4a64aaa7fb66 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.600385] env[61573]: DEBUG oslo_vmware.api [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Task: {'id': task-4836904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075964} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.600632] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1892.600851] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1892.601047] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1892.601224] env[61573]: INFO nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1892.603396] env[61573]: DEBUG nova.compute.claims [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1892.603567] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.603780] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.629489] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1892.702020] env[61573]: DEBUG oslo_vmware.rw_handles [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1892.761234] env[61573]: DEBUG oslo_vmware.rw_handles [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1892.761427] env[61573]: DEBUG oslo_vmware.rw_handles [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1892.901175] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5c7fb-8662-49cb-8338-c0637c443e63 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.910444] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf9f963-ea59-499f-874b-dae4c8314b9c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.940960] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be89423-3aad-4f7f-abca-d6938363781c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.949013] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f01b067-4503-4d00-92da-3302f6014545 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.963992] env[61573]: DEBUG nova.compute.provider_tree [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.973232] env[61573]: DEBUG nova.scheduler.client.report [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.991459] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.388s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.991992] env[61573]: ERROR nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1892.991992] env[61573]: Faults: ['InvalidArgument'] [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Traceback (most recent call last): [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self.driver.spawn(context, instance, image_meta, [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self._fetch_image_if_missing(context, vi) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] image_cache(vi, tmp_image_ds_loc) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] vm_util.copy_virtual_disk( [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] session._wait_for_task(vmdk_copy_task) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return self.wait_for_task(task_ref) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return evt.wait() [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] result = hub.switch() [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] return self.greenlet.switch() [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] self.f(*self.args, **self.kw) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 
3e075864-6503-4d83-bbd4-f0bec8104e03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] raise exceptions.translate_fault(task_info.error) [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Faults: ['InvalidArgument'] [ 1892.991992] env[61573]: ERROR nova.compute.manager [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] [ 1892.992895] env[61573]: DEBUG nova.compute.utils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1892.994274] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Build of instance 3e075864-6503-4d83-bbd4-f0bec8104e03 was re-scheduled: A specified parameter was not correct: fileType [ 1892.994274] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1892.994657] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1892.994827] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1892.994997] env[61573]: DEBUG nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1892.995173] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1893.432802] env[61573]: DEBUG nova.network.neutron [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.447467] env[61573]: INFO nova.compute.manager [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Took 0.45 seconds to deallocate network for instance. [ 1893.561301] env[61573]: INFO nova.scheduler.client.report [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Deleted allocations for instance 3e075864-6503-4d83-bbd4-f0bec8104e03 [ 1893.587236] env[61573]: DEBUG oslo_concurrency.lockutils [None req-592a766a-704a-4faa-9b37-a6338c873659 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 694.277s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.588460] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 498.597s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.588686] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "3e075864-6503-4d83-bbd4-f0bec8104e03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.588895] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock 
"3e075864-6503-4d83-bbd4-f0bec8104e03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.589116] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.591289] env[61573]: INFO nova.compute.manager [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Terminating instance [ 1893.593263] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquiring lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.593467] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Acquired lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.593586] env[61573]: DEBUG nova.network.neutron [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1893.598567] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1893.622385] env[61573]: DEBUG nova.network.neutron [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1893.675220] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.675573] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.677318] env[61573]: INFO nova.compute.claims [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.787118] env[61573]: DEBUG nova.network.neutron [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.797917] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Releasing lock "refresh_cache-3e075864-6503-4d83-bbd4-f0bec8104e03" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.797917] env[61573]: DEBUG nova.compute.manager [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1893.798204] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1893.798614] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82e40f76-e2eb-4007-926d-10ca5dcd4cfa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.809048] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bdd13c-e35e-4c3b-9316-8ae753d69e08 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.851959] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3e075864-6503-4d83-bbd4-f0bec8104e03 could not be found. [ 1893.852353] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1893.852607] env[61573]: INFO nova.compute.manager [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1893.852872] env[61573]: DEBUG oslo.service.loopingcall [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.855856] env[61573]: DEBUG nova.compute.manager [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1893.855963] env[61573]: DEBUG nova.network.neutron [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1893.876726] env[61573]: DEBUG nova.network.neutron [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1893.884449] env[61573]: DEBUG nova.network.neutron [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.894372] env[61573]: INFO nova.compute.manager [-] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] Took 0.04 seconds to deallocate network for instance. [ 1893.934065] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7668b74-46a8-492d-ae9f-e34b254e5e81 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.943022] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304050bc-26d9-4512-8250-2a1fe225d5be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.980923] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1741fa97-9971-49e7-8ac5-5713fd916211 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.992200] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e83827-9514-49ec-b82b-eb63a928302e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.007217] env[61573]: DEBUG nova.compute.provider_tree [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.017596] env[61573]: DEBUG nova.scheduler.client.report [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1894.037382] env[61573]: DEBUG oslo_concurrency.lockutils [None req-fcd27809-f319-46c1-9450-f32927d34948 tempest-ServerRescueTestJSONUnderV235-1964296175 tempest-ServerRescueTestJSONUnderV235-1964296175-project-member] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.449s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.038304] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 149.315s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.038639] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3e075864-6503-4d83-bbd4-f0bec8104e03] During sync_power_state the instance has a pending task (deleting). Skip. [ 1894.038802] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "3e075864-6503-4d83-bbd4-f0bec8104e03" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.041776] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.366s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.042368] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1894.081988] env[61573]: DEBUG nova.compute.utils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1894.083346] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1894.083528] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1894.093838] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1894.158123] env[61573]: DEBUG nova.policy [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81607c4c942e464f82073635c8691d54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0341bbb698194bf6a4cbca166a5dfffe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1894.161445] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1894.186817] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1894.187084] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1894.187244] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1894.187429] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1894.187575] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1894.187722] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e 
tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1894.187926] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1894.188103] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1894.188275] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1894.188439] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1894.188702] env[61573]: DEBUG nova.virt.hardware [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1894.189750] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b3cb23-992a-4c04-a367-4ff9df57d43d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.199548] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c40ae79-eb5a-49bd-93a8-80d13eeba893 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.403529] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.403714] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1894.403836] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1894.426427] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.426622] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.426765] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.426921] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.427598] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.427777] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.427908] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.428042] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.428172] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.428295] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1894.428415] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1894.428971] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.685950] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Successfully created port: c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1895.106437] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Successfully created port: 6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1895.403824] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.404152] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.404342] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1895.404582] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.417309] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.417534] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.417710] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.417893] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1895.419060] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b12aff-0206-4286-83d8-60f06b5951be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.429425] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02abb02f-cf8a-4b28-af76-7cf2710eccf8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.444929] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af44a4ba-4c01-4c9f-99d9-ee4e1c27f5ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.452544] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdec28c-af9a-4987-a2cc-a3eed2da8383 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.482806] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180551MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1895.482972] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1895.483198] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.562501] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.562689] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.562809] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.562920] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563127] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563281] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563398] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563515] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.563626] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1895.576519] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1895.588703] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1895.589821] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1895.589821] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '78', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_c143aa5f5238459388ccd140702680ab': '1', 'io_workload': '10', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '2', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '4', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1895.768159] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31c1ee5-e93a-4276-ac85-6ddc6ff5397b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.777667] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a3b2ea-1cd1-4397-81e4-ae8cdffa0cde {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.811101] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d749d9-1437-42e5-9337-60d8ded3b319 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.821939] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580ee7d7-17bb-4163-a329-c7f3822f2790 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.827163] env[61573]: DEBUG nova.compute.manager [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received event network-vif-plugged-c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1895.827414] env[61573]: DEBUG oslo_concurrency.lockutils [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] Acquiring lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.827645] env[61573]: DEBUG oslo_concurrency.lockutils [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.827924] env[61573]: DEBUG oslo_concurrency.lockutils [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.828033] env[61573]: DEBUG nova.compute.manager [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] No waiting events found dispatching network-vif-plugged-c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1895.828148] env[61573]: WARNING nova.compute.manager [req-95196204-600d-4bc5-9160-d3346cd760cd req-277f3234-83fd-46a2-8f0b-d1fd5d031726 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received unexpected event network-vif-plugged-c802956b-9f7a-4fdb-a509-b09d25880c4e for instance with vm_state building and task_state spawning. [ 1895.839910] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1895.850057] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1895.866444] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1895.866660] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.383s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.909971] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Successfully updated port: c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1896.670749] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Successfully updated port: 6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1896.680979] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.681153] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.681304] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1896.733581] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1897.188969] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Updating instance_info_cache with network_info: [{"id": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "address": "fa:16:3e:0c:c7:13", "network": {"id": "87747f12-2d9a-4aca-80e3-a0265c50451a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-487652707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc802956b-9f", "ovs_interfaceid": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "address": "fa:16:3e:7c:f6:fd", "network": {"id": "9e0d0a22-a211-44ac-b718-5054729b9b99", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1137573529", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, 
"tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae449ee-c3", "ovs_interfaceid": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.204760] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.205073] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance network_info: |[{"id": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "address": "fa:16:3e:0c:c7:13", "network": {"id": "87747f12-2d9a-4aca-80e3-a0265c50451a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-487652707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc802956b-9f", "ovs_interfaceid": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "address": "fa:16:3e:7c:f6:fd", "network": {"id": "9e0d0a22-a211-44ac-b718-5054729b9b99", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1137573529", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae449ee-c3", "ovs_interfaceid": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1897.205531] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:c7:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39cd75b0-9ec7-48ed-b57f-34da0c573a60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c802956b-9f7a-4fdb-a509-b09d25880c4e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:f6:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1897.214750] env[61573]: DEBUG oslo.service.loopingcall [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.215350] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1897.215609] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c351451-4c50-403e-8675-af8a8a42c260 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.238473] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1897.238473] env[61573]: value = "task-4836905" [ 1897.238473] env[61573]: _type = "Task" [ 1897.238473] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.247356] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836905, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.749381] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836905, 'name': CreateVM_Task, 'duration_secs': 0.382462} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.749549] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1897.750353] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.750526] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.750860] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1897.751128] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29deed8c-0824-4254-add6-48d97cac6c09 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.756468] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 1897.756468] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cfd157-aea2-a21b-6864-cb4fb8be2e7f" [ 1897.756468] env[61573]: _type = "Task" [ 1897.756468] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.767143] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52cfd157-aea2-a21b-6864-cb4fb8be2e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.850846] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received event network-changed-c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1897.851064] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Refreshing instance network info cache due to event network-changed-c802956b-9f7a-4fdb-a509-b09d25880c4e. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1897.851280] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Acquiring lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.851422] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Acquired lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.851582] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Refreshing network info cache for port c802956b-9f7a-4fdb-a509-b09d25880c4e {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1897.866564] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.866761] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.172492] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Updated VIF entry in instance network info cache for port c802956b-9f7a-4fdb-a509-b09d25880c4e. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1898.172915] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Updating instance_info_cache with network_info: [{"id": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "address": "fa:16:3e:0c:c7:13", "network": {"id": "87747f12-2d9a-4aca-80e3-a0265c50451a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-487652707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc802956b-9f", "ovs_interfaceid": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "address": "fa:16:3e:7c:f6:fd", "network": {"id": "9e0d0a22-a211-44ac-b718-5054729b9b99", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1137573529", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae449ee-c3", "ovs_interfaceid": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.183618] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Releasing lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.183874] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received event network-vif-plugged-6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1898.184079] env[61573]: DEBUG oslo_concurrency.lockutils 
[req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Acquiring lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.184286] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.184449] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.184621] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] No waiting events found dispatching network-vif-plugged-6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1898.184779] env[61573]: WARNING nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received unexpected event network-vif-plugged-6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 for instance with vm_state building and task_state spawning. [ 1898.184937] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Received event network-changed-6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1898.185103] env[61573]: DEBUG nova.compute.manager [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Refreshing instance network info cache due to event network-changed-6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1898.185281] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Acquiring lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.185418] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Acquired lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.185577] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Refreshing network info cache for port 6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1898.267940] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.268313] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1898.268443] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.713458] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Updated VIF entry in instance network info cache for port 6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1898.713953] env[61573]: DEBUG nova.network.neutron [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Updating instance_info_cache with network_info: [{"id": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "address": "fa:16:3e:0c:c7:13", "network": {"id": "87747f12-2d9a-4aca-80e3-a0265c50451a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-487652707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc802956b-9f", "ovs_interfaceid": "c802956b-9f7a-4fdb-a509-b09d25880c4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "address": "fa:16:3e:7c:f6:fd", "network": {"id": "9e0d0a22-a211-44ac-b718-5054729b9b99", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1137573529", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0341bbb698194bf6a4cbca166a5dfffe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae449ee-c3", "ovs_interfaceid": "6ae449ee-c3d7-4c11-bcc8-14d1ccddfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.726375] env[61573]: DEBUG oslo_concurrency.lockutils [req-5ccda1ed-0d5f-42c6-b842-71454daccfe8 req-8f9e1f8c-85b6-4806-84fe-a74a678eb680 service nova] Releasing lock "refresh_cache-2dc713f6-b67b-4360-a751-29b7218e130a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.404098] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.399496] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.938398] env[61573]: WARNING oslo_vmware.rw_handles [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1939.938398] env[61573]: ERROR oslo_vmware.rw_handles [ 1939.939406] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1939.940893] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1939.941197] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Copying Virtual Disk [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/4fdf4282-35ff-4c72-9787-bb039825efa8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1939.941495] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e997b4e-4932-44af-aa5f-6b3f78c6b08b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.950295] env[61573]: DEBUG oslo_vmware.api [None 
req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for the task: (returnval){ [ 1939.950295] env[61573]: value = "task-4836906" [ 1939.950295] env[61573]: _type = "Task" [ 1939.950295] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.958400] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Task: {'id': task-4836906, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.461628] env[61573]: DEBUG oslo_vmware.exceptions [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1940.461855] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.462455] env[61573]: ERROR nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1940.462455] env[61573]: Faults: ['InvalidArgument'] [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Traceback (most recent call last): [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] yield resources [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self.driver.spawn(context, instance, image_meta, [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self._fetch_image_if_missing(context, vi) [ 1940.462455] 
env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] image_cache(vi, tmp_image_ds_loc) [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] vm_util.copy_virtual_disk( [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] session._wait_for_task(vmdk_copy_task) [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return self.wait_for_task(task_ref) [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return evt.wait() [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] result = hub.switch() [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return self.greenlet.switch() [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self.f(*self.args, **self.kw) [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] raise exceptions.translate_fault(task_info.error) [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Faults: ['InvalidArgument'] [ 1940.462455] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] [ 1940.463526] env[61573]: INFO nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd 
tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Terminating instance [ 1940.464460] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.464665] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1940.464909] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ece8ee3-67bf-4d91-b790-c62d05b3d84c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.467167] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1940.467360] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1940.468127] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da90fe8-d73b-482e-8980-f395a018791e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.475346] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1940.475619] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91e7ef32-4617-414d-a502-eb7d0204c06b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.477861] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1940.478045] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1940.478993] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-880f7350-cba4-453d-ab43-a6e2c4041321 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.483921] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 1940.483921] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52751f39-5098-b1eb-2874-5772a1878001" [ 1940.483921] env[61573]: _type = "Task" [ 1940.483921] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.491864] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52751f39-5098-b1eb-2874-5772a1878001, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.546605] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1940.546831] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1940.546991] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Deleting the datastore file [datastore2] d2350c0d-8c21-4dc2-b0d4-01f98799ab6d {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1940.547296] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9a83a27-a8db-43a9-beba-cd2626a17cd0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.553829] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for the task: (returnval){ [ 1940.553829] env[61573]: value = "task-4836908" [ 1940.553829] env[61573]: _type = "Task" [ 1940.553829] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.562021] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Task: {'id': task-4836908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.994671] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1940.995111] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating directory with path [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1940.995196] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44da3103-d4f1-4b86-bd76-89100e1418d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.007233] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created directory with path [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.007423] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Fetch image to [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1941.007588] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1941.008436] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75568657-915f-4a53-8105-46c142c58dd4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.016060] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9b3588-2479-476c-99ef-ae7e204c4a1d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.025683] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c23fa49-c140-4db8-a1a5-1b4c8ee2dd2d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.061022] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9dca2b-eadf-41d8-90ac-4bd1f4999ee5 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.070770] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d3676b37-b844-48fb-9d49-58462f1ea55d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.072640] env[61573]: DEBUG oslo_vmware.api [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Task: {'id': task-4836908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075028} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.072889] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.073093] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1941.073266] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1941.073436] env[61573]: INFO nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1941.075695] env[61573]: DEBUG nova.compute.claims [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1941.075875] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.076100] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.095026] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1941.159607] env[61573]: DEBUG oslo_vmware.rw_handles [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1941.218951] env[61573]: DEBUG oslo_vmware.rw_handles [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1941.219149] env[61573]: DEBUG oslo_vmware.rw_handles [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1941.359428] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5db956-14f6-4827-86db-c1230da9b0e6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.368171] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c869b92-554a-43e9-8f31-51586da5e342 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.398838] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b199a556-5b4a-42f4-93d5-c013f2a092cf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.407032] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a87fef-bc1c-4fe5-82fd-e1d5bccc0315 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.422166] env[61573]: DEBUG nova.compute.provider_tree [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1941.431218] env[61573]: DEBUG nova.scheduler.client.report [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1941.450734] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.374s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.451467] env[61573]: ERROR nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1941.451467] env[61573]: Faults: ['InvalidArgument'] [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Traceback (most recent call last): [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self.driver.spawn(context, instance, image_meta, [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self._fetch_image_if_missing(context, vi) [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] image_cache(vi, tmp_image_ds_loc) [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] vm_util.copy_virtual_disk( [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] session._wait_for_task(vmdk_copy_task) [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return self.wait_for_task(task_ref) [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return evt.wait() [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] result = hub.switch() [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] return self.greenlet.switch() [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] self.f(*self.args, **self.kw) [ 1941.451467] 
env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] raise exceptions.translate_fault(task_info.error) [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Faults: ['InvalidArgument'] [ 1941.451467] env[61573]: ERROR nova.compute.manager [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] [ 1941.452464] env[61573]: DEBUG nova.compute.utils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1941.453848] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Build of instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d was re-scheduled: A specified parameter was not correct: fileType [ 1941.453848] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1941.454242] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1941.454419] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1941.454592] env[61573]: DEBUG nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1941.454761] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1941.834573] env[61573]: DEBUG nova.network.neutron [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.848349] env[61573]: INFO nova.compute.manager [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Took 0.39 seconds to deallocate network for instance. [ 1941.961294] env[61573]: INFO nova.scheduler.client.report [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Deleted allocations for instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d [ 1941.985773] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e9fe8026-d6d6-4f0d-bda6-012e3d7a49fd tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 574.622s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.987033] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 378.116s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.987261] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Acquiring lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.987470] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 
tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.987640] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.989973] env[61573]: INFO nova.compute.manager [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Terminating instance [ 1941.992980] env[61573]: DEBUG nova.compute.manager [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1941.992980] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1941.993380] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c4ace1c-79c9-45c9-a01a-83773387aff6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.004788] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e62e8a-1ef4-4f4e-ad9a-2718fb0bf017 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.016045] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1942.039730] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2350c0d-8c21-4dc2-b0d4-01f98799ab6d could not be found. 
[ 1942.039952] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1942.040154] env[61573]: INFO nova.compute.manager [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1942.040408] env[61573]: DEBUG oslo.service.loopingcall [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1942.040670] env[61573]: DEBUG nova.compute.manager [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1942.040832] env[61573]: DEBUG nova.network.neutron [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1942.070515] env[61573]: DEBUG nova.network.neutron [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.079328] env[61573]: INFO nova.compute.manager [-] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] Took 0.04 seconds to deallocate network for instance. 
[ 1942.081568] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.081796] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.083284] env[61573]: INFO nova.compute.claims [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1942.179067] env[61573]: DEBUG oslo_concurrency.lockutils [None req-7325cc90-c31a-4e27-a441-a99f68a12656 tempest-ImagesOneServerNegativeTestJSON-1392310591 tempest-ImagesOneServerNegativeTestJSON-1392310591-project-member] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.180065] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 197.457s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.180266] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d2350c0d-8c21-4dc2-b0d4-01f98799ab6d] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1942.180266] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d2350c0d-8c21-4dc2-b0d4-01f98799ab6d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.280421] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ccd4aa-9645-4a9e-9488-3d18d14c3359 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.288460] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2064f05-a6c1-4c3b-b4e7-81100508707c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.321199] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9878b453-db65-489c-b12e-603037df6d5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.329780] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245ba382-17e7-45ff-b04b-61e8367a55a6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.343915] env[61573]: DEBUG nova.compute.provider_tree [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1942.352990] env[61573]: DEBUG nova.scheduler.client.report [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1942.367972] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.286s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.368502] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1942.401985] env[61573]: DEBUG nova.compute.utils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1942.403805] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1942.403805] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1942.414984] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1942.482140] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1942.490303] env[61573]: DEBUG nova.policy [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31cd6718df1c44f3ba30eb71f36cdb69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8caf6003840413c8eff7d84d9b185cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1942.508186] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1942.508448] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1942.508605] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1942.508782] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1942.508927] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1942.509576] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1942.509843] env[61573]: DEBUG nova.virt.hardware [None 
req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1942.510026] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1942.510204] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1942.510373] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1942.510557] env[61573]: DEBUG nova.virt.hardware [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1942.511462] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17448be8-2c44-459e-9652-e1b3dc5ad7b7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.520935] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04d7fa3-214d-4da7-b3d8-a2d1a5c5e5f3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.159245] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Successfully created port: 1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.878974] env[61573]: DEBUG nova.compute.manager [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Received event network-vif-plugged-1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1943.879270] env[61573]: DEBUG oslo_concurrency.lockutils [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] Acquiring lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.879412] env[61573]: DEBUG oslo_concurrency.lockutils [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] Lock 
"d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.879578] env[61573]: DEBUG oslo_concurrency.lockutils [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.879742] env[61573]: DEBUG nova.compute.manager [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] No waiting events found dispatching network-vif-plugged-1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1943.879903] env[61573]: WARNING nova.compute.manager [req-b3217c8f-1e03-4e75-8111-4cfaca9ea2ca req-2cba8434-4844-48a2-a770-03532eda215b service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Received unexpected event network-vif-plugged-1ba503e5-6583-4a56-9691-984a7315a4ce for instance with vm_state building and task_state spawning. [ 1943.934639] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Successfully updated port: 1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1943.970026] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.970026] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.970026] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1944.027574] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1944.219050] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Updating instance_info_cache with network_info: [{"id": "1ba503e5-6583-4a56-9691-984a7315a4ce", "address": "fa:16:3e:b9:45:66", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ba503e5-65", "ovs_interfaceid": "1ba503e5-6583-4a56-9691-984a7315a4ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.231392] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.231856] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance network_info: |[{"id": "1ba503e5-6583-4a56-9691-984a7315a4ce", "address": "fa:16:3e:b9:45:66", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ba503e5-65", "ovs_interfaceid": "1ba503e5-6583-4a56-9691-984a7315a4ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1944.232127] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:45:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ba503e5-6583-4a56-9691-984a7315a4ce', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1944.240298] env[61573]: DEBUG oslo.service.loopingcall [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1944.240868] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1944.241279] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc38cc64-db97-4d80-8788-0a659dc36883 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.263152] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1944.263152] env[61573]: value = "task-4836909" [ 1944.263152] env[61573]: _type = "Task" [ 1944.263152] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.271944] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836909, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.774765] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836909, 'name': CreateVM_Task, 'duration_secs': 0.296862} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.774984] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1944.775668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.775837] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.776190] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1944.776476] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc6a1e09-6aca-41c3-96bc-8ba44c93823e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.781750] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 1944.781750] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a68ead-0da6-b3f0-10d0-3a0fe6d60020" [ 1944.781750] env[61573]: _type = "Task" [ 1944.781750] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.790384] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a68ead-0da6-b3f0-10d0-3a0fe6d60020, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.890196] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.978668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "cef978e5-e61d-4188-a58e-1b5690731c1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.297704] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.298169] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1945.298543] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.905182] env[61573]: DEBUG nova.compute.manager [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Received event network-changed-1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1945.905379] env[61573]: DEBUG nova.compute.manager [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Refreshing instance network info cache due to event network-changed-1ba503e5-6583-4a56-9691-984a7315a4ce. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1945.905593] env[61573]: DEBUG oslo_concurrency.lockutils [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] Acquiring lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.905734] env[61573]: DEBUG oslo_concurrency.lockutils [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] Acquired lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.905950] env[61573]: DEBUG nova.network.neutron [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Refreshing network info cache for port 1ba503e5-6583-4a56-9691-984a7315a4ce {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1946.276180] env[61573]: DEBUG nova.network.neutron [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Updated VIF entry in instance network info cache for port 1ba503e5-6583-4a56-9691-984a7315a4ce. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1946.276180] env[61573]: DEBUG nova.network.neutron [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Updating instance_info_cache with network_info: [{"id": "1ba503e5-6583-4a56-9691-984a7315a4ce", "address": "fa:16:3e:b9:45:66", "network": {"id": "78bbca97-89ba-4b32-a8e8-f83e40d94593", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-851977150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8caf6003840413c8eff7d84d9b185cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ba503e5-65", "ovs_interfaceid": "1ba503e5-6583-4a56-9691-984a7315a4ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.286342] env[61573]: DEBUG oslo_concurrency.lockutils [req-f08308bc-453a-4a59-bc30-9ae02ae58089 req-de410249-2a64-4989-ad86-1cd18bce99b6 service nova] Releasing lock "refresh_cache-d80e3b10-95a8-45b8-84d2-6221ba33f2d7" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.404022] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.404022] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 1955.404022] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1955.425245] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.425525] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1955.427093] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 1956.404083] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.404083] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.404437] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.416295] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.416511] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.416675] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.416829] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1956.417959] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01ddb4e-58fe-42bc-804e-5e5335573870 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.426646] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8bc2a1-4bde-4cbd-98f1-631bc3764278 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.440899] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1097b86-837c-4749-bfca-9a506b754999 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.447645] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93210109-b456-4cc8-bff2-43c03d8d9d75 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.477028] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180551MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1956.477028] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.477258] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.556225] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.556225] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.556439] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.556439] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.557201] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1956.569655] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1956.569931] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1956.570117] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '79', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '2', 'io_workload': '10', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '2', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_task_spawning': '3', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1956.721646] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c1b427-5d69-4baf-818f-6737e7f3ab29 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.729372] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b6f209-c916-4c81-966b-52fd49b8f323 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.758244] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbca20aa-317a-43b6-a1ee-851fc724ed9e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.765345] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15278af3-52d3-439e-b02c-5331549bb7fd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.778574] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.787154] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1956.802022] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1956.802233] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.325s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.802275] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.802650] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 1958.399881] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.404019] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.404414] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1960.404352] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.400016] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.680518] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.175955] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "2dc713f6-b67b-4360-a751-29b7218e130a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.008010] env[61573]: DEBUG oslo_concurrency.lockutils [None 
req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.586925] env[61573]: WARNING oslo_vmware.rw_handles [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1988.586925] env[61573]: ERROR oslo_vmware.rw_handles [ 1988.587773] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1988.589643] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1988.589885] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Copying Virtual Disk [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/4e1ea0e0-2da2-497a-97a5-3300bde14f79/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1988.590236] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1df5f85-dbc5-4634-993c-72d2516b6f46 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.601230] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 1988.601230] env[61573]: value = "task-4836910" [ 1988.601230] env[61573]: _type = "Task" [ 1988.601230] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.610414] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': task-4836910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.111695] env[61573]: DEBUG oslo_vmware.exceptions [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1989.111974] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.112627] env[61573]: ERROR nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1989.112627] env[61573]: Faults: ['InvalidArgument'] [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Traceback (most recent call last): [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] yield resources [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self.driver.spawn(context, instance, image_meta, [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self._fetch_image_if_missing(context, vi) [ 1989.112627] 
env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] image_cache(vi, tmp_image_ds_loc) [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] vm_util.copy_virtual_disk( [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] session._wait_for_task(vmdk_copy_task) [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return self.wait_for_task(task_ref) [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return evt.wait() [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] result = hub.switch() [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return self.greenlet.switch() [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self.f(*self.args, **self.kw) [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] raise exceptions.translate_fault(task_info.error) [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Faults: ['InvalidArgument'] [ 1989.112627] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] [ 1989.113701] env[61573]: INFO nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 
tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Terminating instance [ 1989.114613] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.114813] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1989.115068] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f50ab753-cde3-49bc-8951-5578873eeeee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.117491] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1989.117687] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1989.118420] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aabe82-743e-451a-9643-371e3f94f35d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.125171] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1989.125385] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87ab8e1f-98c0-4946-a9d5-a0d3c9c9316b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.127544] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1989.127724] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1989.128648] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9e87ed-8dc8-4331-bed9-2b5a244e9d29 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.135019] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 1989.135019] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f2fb45-5aae-86c8-1211-fb21e4444083" [ 1989.135019] env[61573]: _type = "Task" [ 1989.135019] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.140920] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52f2fb45-5aae-86c8-1211-fb21e4444083, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.277341] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1989.277579] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1989.277760] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleting the datastore file [datastore2] adb2282f-224e-4a56-abd8-cd91bd0023f0 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1989.278049] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-602abfa5-0cd1-4744-b379-f538548255da {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.285364] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 1989.285364] env[61573]: value = "task-4836912" [ 1989.285364] env[61573]: _type = "Task" [ 1989.285364] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.294704] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': task-4836912, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.645185] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1989.645642] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating directory with path [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1989.645690] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39e647c9-7982-4b60-8774-3bf2bf9a2ccc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.657741] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created directory with path [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1989.657997] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Fetch image to [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1989.658134] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1989.658912] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48705154-b2b6-4fab-8edc-2bbccd51beb4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.666232] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f55f4ea-0bd8-45bd-b342-c91d09213272 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.675840] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d0efd3-792f-416b-8a3a-f17e9ef4d60c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.705994] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2859e42-ed25-46a5-b6a1-035aa327a7cc 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.712065] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bf60a804-a052-4417-9897-2d284edba363 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.733922] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1989.790222] env[61573]: DEBUG oslo_vmware.rw_handles [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1989.848013] env[61573]: DEBUG oslo_vmware.api [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': task-4836912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093535} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.849430] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.849643] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1989.849850] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1989.850049] env[61573]: INFO nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Took 0.73 seconds to destroy the instance on the hypervisor. [ 1989.851932] env[61573]: DEBUG oslo_vmware.rw_handles [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Completed reading data from the image iterator. 
{{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1989.852125] env[61573]: DEBUG oslo_vmware.rw_handles [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1989.852688] env[61573]: DEBUG nova.compute.claims [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1989.852851] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.853113] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.055253] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe566c9-f912-441e-adf9-361fa3278be8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.063885] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2301113f-5d72-458c-a96f-84ef3e231139 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.095855] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d34a16-4ba7-4f96-bae4-fcaa72165ef5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.103553] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95eabebc-c778-44a1-b582-d501aa9ae2fc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.116914] env[61573]: DEBUG nova.compute.provider_tree [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1990.128079] env[61573]: DEBUG nova.scheduler.client.report [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1990.145437] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.292s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.145990] env[61573]: ERROR nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1990.145990] env[61573]: Faults: ['InvalidArgument'] [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Traceback (most recent call last): [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self.driver.spawn(context, instance, image_meta, [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self._fetch_image_if_missing(context, vi) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] image_cache(vi, tmp_image_ds_loc) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] vm_util.copy_virtual_disk( [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] session._wait_for_task(vmdk_copy_task) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return self.wait_for_task(task_ref) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return evt.wait() [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] result = hub.switch() [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] return self.greenlet.switch() [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] self.f(*self.args, **self.kw) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] raise exceptions.translate_fault(task_info.error) [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Faults: ['InvalidArgument'] [ 1990.145990] env[61573]: ERROR nova.compute.manager [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] [ 1990.146998] env[61573]: DEBUG nova.compute.utils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1990.148298] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Build of instance adb2282f-224e-4a56-abd8-cd91bd0023f0 was re-scheduled: A specified parameter was not correct: fileType [ 1990.148298] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1990.148664] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 
1990.148836] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1990.148999] env[61573]: DEBUG nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1990.149184] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1991.256452] env[61573]: DEBUG nova.network.neutron [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.272923] env[61573]: INFO nova.compute.manager [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Took 1.12 seconds to deallocate network for instance. 
[ 1991.378977] env[61573]: INFO nova.scheduler.client.report [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleted allocations for instance adb2282f-224e-4a56-abd8-cd91bd0023f0 [ 1991.406051] env[61573]: DEBUG oslo_concurrency.lockutils [None req-31bf4557-f0ad-4463-93ab-51b467e29a19 tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.650s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.407290] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.869s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.407613] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.407841] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.408016] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.410062] env[61573]: INFO nova.compute.manager [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Terminating instance [ 1991.412745] env[61573]: DEBUG nova.compute.manager [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1991.413266] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1991.413266] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea5602f6-3268-4be4-8387-753923765ba4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.424770] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf0531e-34cb-484a-bcd0-85a47e3647a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.435830] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1991.460079] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adb2282f-224e-4a56-abd8-cd91bd0023f0 could not be found. [ 1991.460302] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1991.460486] env[61573]: INFO nova.compute.manager [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1991.460739] env[61573]: DEBUG oslo.service.loopingcall [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1991.460989] env[61573]: DEBUG nova.compute.manager [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1991.461097] env[61573]: DEBUG nova.network.neutron [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1991.488454] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.488698] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.490189] env[61573]: INFO nova.compute.claims [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1991.493271] env[61573]: DEBUG nova.network.neutron [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.504025] env[61573]: INFO nova.compute.manager [-] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] Took 0.04 seconds to deallocate network for instance. [ 1991.606394] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a4061527-e400-44cb-b752-1c787d248caf tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.607150] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 246.884s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.607343] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: adb2282f-224e-4a56-abd8-cd91bd0023f0] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1991.607516] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "adb2282f-224e-4a56-abd8-cd91bd0023f0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.689437] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62bda6a-8d1e-45f7-867c-cf94667e1069 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.697892] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39911759-3c16-4b02-aed6-f0d9f344678a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.727575] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cfd40c-6574-4b74-b68e-13ab0d4e803f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.735552] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66fcdea-85f8-4b84-8342-808d96e60409 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.750606] env[61573]: DEBUG nova.compute.provider_tree [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1991.760424] env[61573]: DEBUG nova.scheduler.client.report [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1991.777292] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.777823] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1991.813030] env[61573]: DEBUG nova.compute.utils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1991.814758] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1991.814758] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1991.824794] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1991.912991] env[61573]: DEBUG nova.policy [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '904cef1fe2eb4eb7aba180903b3d65c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b0a7b0d400a42c9b32dc3f491d17d74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 1991.919369] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1991.950362] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1991.950626] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1991.950781] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1991.950974] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1991.951135] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1991.951336] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1991.951515] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1991.951673] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1991.951851] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 
tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1991.952031] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1991.952232] env[61573]: DEBUG nova.virt.hardware [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1991.953168] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fb5873-30a1-4ea4-b1e8-1035692d66fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.962197] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cb4679-bbb7-48da-b796-f3cea8bf85e5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.359125] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Successfully created port: 916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1993.394210] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Successfully updated port: 916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1993.414290] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.414386] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquired lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1993.414512] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1993.479376] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: 
c3053874-e935-40c5-ac81-268e759611f1] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1993.744582] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Updating instance_info_cache with network_info: [{"id": "916a1330-268c-4073-9168-b0a8c3efb887", "address": "fa:16:3e:e1:a5:70", "network": {"id": "db5231c1-a273-4914-8489-af82fbe39e62", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-667920786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0a7b0d400a42c9b32dc3f491d17d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916a1330-26", "ovs_interfaceid": "916a1330-268c-4073-9168-b0a8c3efb887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.757347] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Releasing lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.757705] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance network_info: |[{"id": "916a1330-268c-4073-9168-b0a8c3efb887", "address": "fa:16:3e:e1:a5:70", "network": {"id": "db5231c1-a273-4914-8489-af82fbe39e62", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-667920786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0a7b0d400a42c9b32dc3f491d17d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916a1330-26", "ovs_interfaceid": "916a1330-268c-4073-9168-b0a8c3efb887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1993.758160] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:a5:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '916a1330-268c-4073-9168-b0a8c3efb887', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1993.765846] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Creating folder: Project (7b0a7b0d400a42c9b32dc3f491d17d74). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1993.766588] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92e0e006-0a0f-4162-babc-868330d4d692 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.780269] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Created folder: Project (7b0a7b0d400a42c9b32dc3f491d17d74) in parent group-v942801. [ 1993.780469] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Creating folder: Instances. Parent ref: group-v942908. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1993.780737] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c21ff41c-86a9-46ea-8b3c-6296530e2f60 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.791681] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Created folder: Instances in parent group-v942908. [ 1993.791953] env[61573]: DEBUG oslo.service.loopingcall [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.792175] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1993.792397] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f50641fe-4d4f-4efd-80d7-3a10449ddd69 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.814861] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1993.814861] env[61573]: value = "task-4836915" [ 1993.814861] env[61573]: _type = "Task" [ 1993.814861] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.823577] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836915, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.325200] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836915, 'name': CreateVM_Task, 'duration_secs': 0.300766} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.325412] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1994.326089] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.326260] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.326577] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1994.326834] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d33891c-c6f2-4fd8-bd69-51475fa36f0f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.331828] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for the task: (returnval){ [ 1994.331828] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c8c2d1-4e47-ed48-f9b3-e86c03ededfa" [ 1994.331828] env[61573]: _type = "Task" [ 1994.331828] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.340847] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c8c2d1-4e47-ed48-f9b3-e86c03ededfa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.840860] env[61573]: DEBUG nova.compute.manager [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Received event network-vif-plugged-916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1994.840860] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Acquiring lock "c3053874-e935-40c5-ac81-268e759611f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.841294] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Lock "c3053874-e935-40c5-ac81-268e759611f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.841294] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Lock "c3053874-e935-40c5-ac81-268e759611f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.841437] env[61573]: DEBUG nova.compute.manager [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] No waiting events found dispatching network-vif-plugged-916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1994.841544] env[61573]: WARNING nova.compute.manager [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Received unexpected event network-vif-plugged-916a1330-268c-4073-9168-b0a8c3efb887 for instance with vm_state building and task_state spawning. [ 1994.841705] env[61573]: DEBUG nova.compute.manager [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Received event network-changed-916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 1994.841859] env[61573]: DEBUG nova.compute.manager [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Refreshing instance network info cache due to event network-changed-916a1330-268c-4073-9168-b0a8c3efb887. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 1994.842081] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Acquiring lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.842249] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Acquired lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.842413] env[61573]: DEBUG nova.network.neutron [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Refreshing network info cache for port 916a1330-268c-4073-9168-b0a8c3efb887 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1994.847500] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.847716] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1994.847908] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.095887] env[61573]: DEBUG nova.network.neutron [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Updated VIF entry in instance network info cache for port 916a1330-268c-4073-9168-b0a8c3efb887. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1995.096444] env[61573]: DEBUG nova.network.neutron [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] [instance: c3053874-e935-40c5-ac81-268e759611f1] Updating instance_info_cache with network_info: [{"id": "916a1330-268c-4073-9168-b0a8c3efb887", "address": "fa:16:3e:e1:a5:70", "network": {"id": "db5231c1-a273-4914-8489-af82fbe39e62", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-667920786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0a7b0d400a42c9b32dc3f491d17d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916a1330-26", "ovs_interfaceid": "916a1330-268c-4073-9168-b0a8c3efb887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.106157] env[61573]: DEBUG oslo_concurrency.lockutils [req-5c991b39-c068-4dfc-8359-51a7b69c6db6 req-98d1df53-c67b-4112-9777-474a6a6c77aa service nova] Releasing lock "refresh_cache-c3053874-e935-40c5-ac81-268e759611f1" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.943398] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "2aa8f536-49ba-43f8-8f36-5741e300652a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.943748] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.510093] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "c3053874-e935-40c5-ac81-268e759611f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.404585] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.404914] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 2010.415606] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 2014.611649] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "d892e1ae-e434-40b2-b86a-99c55d473363" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.611649] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.403943] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.404146] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 2016.412495] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.425442] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.425681] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.425850] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.426026] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2016.427456] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7157f927-0fe7-4ecf-a46b-ee6d60cdad08 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.192717] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749fd085-0d61-4d83-bf1d-ff329de1a7c6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.206618] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0e5f93-9976-411a-a9bf-80a2784f3fc0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.213257] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d69c83f-96ee-4a30-9dee-434e8f02b4b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.241738] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180547MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2017.241954] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.242104] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.372046] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d272f481-f590-46e9-9f51-0c7601ff34ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372046] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372216] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372342] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372463] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372578] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372715] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372831] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.372940] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.373062] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2017.389644] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2017.402326] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2017.402588] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2017.402749] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '80', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '2', 'io_workload': '10', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2017.421341] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2017.436141] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2017.436141] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2017.447717] env[61573]: DEBUG nova.scheduler.client.report [None 
req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2017.467190] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2017.629505] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfba52b-cad7-4dd8-8782-2e421dbe01ff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.637790] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532ef69e-285f-4b3f-94bc-5e7e5d6c9412 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.669230] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc69712-6c83-4240-93a3-4905b5aa38e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.677405] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafd4be9-1444-4540-a006-b2c33da34f12 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.690455] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.699685] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2017.716891] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2017.717111] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.475s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.708710] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.709111] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2018.709111] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2018.731938] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732187] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732246] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732364] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732482] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732624] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732706] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732824] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.732941] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.733062] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2018.733182] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2018.733788] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.734036] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.734177] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.734295] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2019.404376] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.404667] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.403899] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.400828] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.405124] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.516759] env[61573]: WARNING oslo_vmware.rw_handles [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Error occurred while reading the HTTP 
response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2036.516759] env[61573]: ERROR oslo_vmware.rw_handles [ 2036.517400] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2036.519699] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2036.519965] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Copying Virtual Disk [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/79fe14a6-7f4e-440c-b079-86081cb275f8/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2036.520278] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba2a44f8-e0fa-42ce-aade-a97b36d6d5e0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.528139] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 2036.528139] env[61573]: value = "task-4836916" [ 2036.528139] env[61573]: _type = "Task" [ 2036.528139] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.536519] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.039404] env[61573]: DEBUG oslo_vmware.exceptions [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2037.039693] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.040261] env[61573]: ERROR nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.040261] env[61573]: Faults: ['InvalidArgument'] [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Traceback (most recent call last): [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] yield resources [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self.driver.spawn(context, instance, image_meta, [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self._fetch_image_if_missing(context, vi) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] image_cache(vi, tmp_image_ds_loc) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: 
d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] vm_util.copy_virtual_disk( [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] session._wait_for_task(vmdk_copy_task) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return self.wait_for_task(task_ref) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return evt.wait() [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] result = hub.switch() [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return self.greenlet.switch() [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self.f(*self.args, **self.kw) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] raise exceptions.translate_fault(task_info.error) [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Faults: ['InvalidArgument'] [ 2037.040261] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] [ 2037.041606] env[61573]: INFO nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Terminating instance [ 2037.043445] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Start 
destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2037.043700] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2037.043939] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.044116] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2037.044896] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864ea86f-1fe6-46e1-a33e-2d3673c2218e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.047550] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0d19c9d-5352-44a9-a8b2-421a7f163d88 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.054826] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2037.055084] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3b23326-9588-499f-ade6-d250f1c542ff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.057380] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2037.057567] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2037.058534] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d712d0de-8c98-42b3-844c-da8a45d10d35 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.064164] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for the task: (returnval){ [ 2037.064164] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526de0a4-758b-dd14-9f7e-2989f7b9aafd" [ 2037.064164] env[61573]: _type = "Task" [ 2037.064164] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.071657] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]526de0a4-758b-dd14-9f7e-2989f7b9aafd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.124614] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2037.124859] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2037.124996] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleting the datastore file [datastore2] d272f481-f590-46e9-9f51-0c7601ff34ce {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2037.125285] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cd15bdb-0f73-4eb5-897e-ad16790a92be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.132335] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 2037.132335] env[61573]: value = "task-4836918" [ 2037.132335] env[61573]: _type = "Task" [ 2037.132335] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.140347] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836918, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.575336] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2037.575709] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Creating directory with path [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2037.575771] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac3b51f1-c581-4496-8654-c3e2b73cf837 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.587245] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Created directory with path [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2037.587449] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Fetch image to [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2037.587622] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2037.588390] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dbbe78-924a-427f-9622-2e0d7f91f73c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.595316] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad68178-2872-405c-bbb7-fba94a8a5d5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.604884] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c965683e-7928-488d-91e0-523ccc51a3be {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.639647] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-20125da4-e036-4e25-9e7b-1822964fa44e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.649258] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd37ca50-3bf0-45c0-b9da-bee7bfeffa37 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.651122] env[61573]: DEBUG oslo_vmware.api [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076679} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.651375] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2037.651558] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2037.651731] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2037.651909] env[61573]: INFO nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2037.654271] env[61573]: DEBUG nova.compute.claims [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2037.654460] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.654708] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.672813] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2037.802231] env[61573]: DEBUG oslo_vmware.rw_handles [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2037.864216] env[61573]: DEBUG oslo_vmware.rw_handles [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2037.864422] env[61573]: DEBUG oslo_vmware.rw_handles [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2037.938059] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adb1b3f-6ce8-4b10-9457-d1dadc3003ff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.946670] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11857685-479f-468f-8bfb-67ef0751dd57 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.976625] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2986e140-9467-4b0e-b007-1d63d86c4dec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.985228] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ce78d9-ab85-4340-bcdc-60fc415c8b35 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.000216] env[61573]: DEBUG nova.compute.provider_tree [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.010417] env[61573]: DEBUG nova.scheduler.client.report [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.032284] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.377s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.032800] env[61573]: ERROR nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2038.032800] env[61573]: Faults: ['InvalidArgument'] [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Traceback (most recent call last): [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2038.032800] env[61573]: ERROR 
nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self.driver.spawn(context, instance, image_meta, [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self._fetch_image_if_missing(context, vi) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] image_cache(vi, tmp_image_ds_loc) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] vm_util.copy_virtual_disk( [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] session._wait_for_task(vmdk_copy_task) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return self.wait_for_task(task_ref) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return evt.wait() [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] result = hub.switch() [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] return self.greenlet.switch() [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] self.f(*self.args, **self.kw) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] raise exceptions.translate_fault(task_info.error) [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Faults: ['InvalidArgument'] [ 2038.032800] env[61573]: ERROR nova.compute.manager [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] [ 2038.034670] env[61573]: DEBUG nova.compute.utils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2038.035298] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Build of instance d272f481-f590-46e9-9f51-0c7601ff34ce was re-scheduled: A specified parameter was not correct: fileType [ 2038.035298] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2038.035691] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2038.035869] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2038.036059] env[61573]: DEBUG nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2038.036230] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2038.335325] env[61573]: DEBUG nova.network.neutron [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.347520] env[61573]: INFO nova.compute.manager [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Took 0.31 seconds to deallocate network for instance. [ 2038.456550] env[61573]: INFO nova.scheduler.client.report [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleted allocations for instance d272f481-f590-46e9-9f51-0c7601ff34ce [ 2038.481034] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8ed9466e-2b4a-4f84-be0a-d841276d66cf tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 652.154s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.482023] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 456.283s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.482291] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.482601] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.482846] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.486755] env[61573]: INFO nova.compute.manager [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Terminating instance [ 2038.488817] env[61573]: DEBUG nova.compute.manager [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2038.489037] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2038.489507] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4f91cb4-3212-4d98-bd02-a3d123daa162 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.495994] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2038.502883] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bce8c8e-1573-42a5-909d-3cd8e77179ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.535641] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d272f481-f590-46e9-9f51-0c7601ff34ce could not be found. [ 2038.535901] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2038.536114] env[61573]: INFO nova.compute.manager [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Took 0.05 seconds to destroy the instance on the hypervisor. 
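(Illustrative aside, not part of the captured log.) The repeated Acquiring lock "..." / Lock "..." acquired by "..." / Lock "..." "released" by "..." triplets in this section are emitted from oslo.concurrency's lockutils wrapper, as the {{(pid=61573) inner .../oslo_concurrency/lockutils.py:402/407/421}} source references show. A minimal, hedged sketch of that usage pattern follows; the function name and body are invented for illustration and are not Nova code:

    # lock_sketch.py -- illustrative only, not Nova code
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_claims():
        # Only one caller per lock name runs this block at a time; the
        # decorator's wrapper logs the acquire / waited / held messages
        # of the kind seen in the entries above.
        pass

    update_claims()

The waited/held durations in the surrounding entries (e.g. held 652.154s for the build lock, waited 456.283s before the terminate lock was acquired) are reported by that same wrapper, which times how long each caller waited for the lock and how long it was held.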
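(Illustrative aside, not part of the captured log.) The "Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba" entries in this section repeat the same placement inventory for VCPU, MEMORY_MB and DISK_GB. A small sketch of how those figures translate into schedulable capacity, assuming the standard placement formula (total - reserved) * allocation_ratio; the numbers are copied verbatim from the log and the helper name is made up for illustration:

    # capacity_sketch.py -- illustrative only, not Nova/placement code
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # placement capacity: (total - reserved) * allocation_ratio
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, schedulable(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0

With allocation_ratio=4.0, the 48 physical vCPUs reported later in this section as "Total usable vcpus: 48" correspond to 192 schedulable VCPU units in placement, while memory and disk are not over-committed.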
[ 2038.536399] env[61573]: DEBUG oslo.service.loopingcall [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.539063] env[61573]: DEBUG nova.compute.manager [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2038.539180] env[61573]: DEBUG nova.network.neutron [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2038.555330] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.555568] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.557125] env[61573]: INFO nova.compute.claims [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2038.566862] env[61573]: DEBUG nova.network.neutron [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.577478] env[61573]: INFO nova.compute.manager [-] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] Took 0.04 seconds to deallocate network for instance. [ 2038.679240] env[61573]: DEBUG oslo_concurrency.lockutils [None req-410b18a5-5200-4be4-9b00-16927935fb16 tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.680083] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 293.957s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.680280] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d272f481-f590-46e9-9f51-0c7601ff34ce] During sync_power_state the instance has a pending task (deleting). 
Skip. [ 2038.680562] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d272f481-f590-46e9-9f51-0c7601ff34ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.763201] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e3c37c-cf27-428e-be25-3066f749268a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.771692] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c87fbd-ab0b-46ae-96af-d357c275af68 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.803672] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8c3331-b3bb-4187-a52b-37c848dee46a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.811905] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1486b6b4-a945-42d1-81d3-74234963c39d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.825399] env[61573]: DEBUG nova.compute.provider_tree [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.837056] env[61573]: DEBUG nova.scheduler.client.report [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.852198] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.852683] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2038.888084] env[61573]: DEBUG nova.compute.utils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2038.889615] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2038.889823] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2038.901775] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2038.968050] env[61573]: DEBUG nova.policy [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e209cfe158004e46a9693c62a5c2e3f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90325af48fc44451a6c15e089107271a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2039.005676] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2039.035055] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2039.035322] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2039.035481] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2039.035660] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2039.035826] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2039.035970] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2039.036293] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2039.036484] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2039.036686] 
env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2039.036856] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2039.037040] env[61573]: DEBUG nova.virt.hardware [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2039.037910] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2688a9b9-77a5-4a42-b20b-1ba224aa1fe1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.046927] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88311cc7-5f31-4115-9126-c230b28af429 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.410916] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Successfully created port: 6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2040.055241] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Successfully updated port: 6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2040.068350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.068555] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.068701] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2040.114830] env[61573]: DEBUG 
nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2040.339966] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Updating instance_info_cache with network_info: [{"id": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "address": "fa:16:3e:a0:46:85", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6716ccd9-6f", "ovs_interfaceid": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.351770] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.352037] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance network_info: |[{"id": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "address": "fa:16:3e:a0:46:85", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6716ccd9-6f", 
"ovs_interfaceid": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2040.352783] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:46:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6716ccd9-6f70-4167-8a9c-f18a2fe433db', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2040.360562] env[61573]: DEBUG oslo.service.loopingcall [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.362485] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2040.362737] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10697e59-ba6a-425e-a29f-253ac190c0ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.383912] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2040.383912] env[61573]: value = "task-4836919" [ 2040.383912] env[61573]: _type = "Task" [ 2040.383912] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.392664] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836919, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.489570] env[61573]: DEBUG nova.compute.manager [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Received event network-vif-plugged-6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2040.489849] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Acquiring lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.490010] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.490228] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.490356] env[61573]: DEBUG nova.compute.manager [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] No waiting events found dispatching network-vif-plugged-6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2040.490522] env[61573]: WARNING nova.compute.manager [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Received unexpected event network-vif-plugged-6716ccd9-6f70-4167-8a9c-f18a2fe433db for instance with vm_state building and task_state spawning. [ 2040.490684] env[61573]: DEBUG nova.compute.manager [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Received event network-changed-6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2040.490838] env[61573]: DEBUG nova.compute.manager [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Refreshing instance network info cache due to event network-changed-6716ccd9-6f70-4167-8a9c-f18a2fe433db. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2040.491028] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Acquiring lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.491167] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Acquired lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.491325] env[61573]: DEBUG nova.network.neutron [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Refreshing network info cache for port 6716ccd9-6f70-4167-8a9c-f18a2fe433db {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2040.823314] env[61573]: DEBUG nova.network.neutron [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Updated VIF entry in instance network info cache for port 6716ccd9-6f70-4167-8a9c-f18a2fe433db. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2040.823852] env[61573]: DEBUG nova.network.neutron [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Updating instance_info_cache with network_info: [{"id": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "address": "fa:16:3e:a0:46:85", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6716ccd9-6f", "ovs_interfaceid": "6716ccd9-6f70-4167-8a9c-f18a2fe433db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.833242] env[61573]: DEBUG oslo_concurrency.lockutils [req-96a882b7-c65f-49a1-98a4-7dbc0590705d req-29f13624-6808-4396-b528-f1f4d36a131f service nova] Releasing lock "refresh_cache-2aa8f536-49ba-43f8-8f36-5741e300652a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.893660] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836919, 'name': CreateVM_Task, 'duration_secs': 0.308401} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.893838] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2040.894593] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.894759] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.895088] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2040.895336] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed92b24b-3453-4078-8796-960b0df773bc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.899984] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2040.899984] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52610777-d2a5-0edc-528a-d5ccba0c8278" [ 2040.899984] env[61573]: _type = "Task" [ 2040.899984] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.907820] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52610777-d2a5-0edc-528a-d5ccba0c8278, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.411240] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.411599] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2041.411953] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.413658] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.427039] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.427264] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.427434] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.427585] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2076.429058] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb86a58d-637c-441e-b4fe-6e3c4ac9984a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.437467] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c3a590-39cc-4cb4-8896-b246bdf37b4c {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.452694] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7f4f77-dfed-4b9e-970b-11392d9010db {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.459309] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f919f990-22b3-43a0-947e-48b3cb885664 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.487935] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180508MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2076.488105] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.488237] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.570606] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c96755a9-1e1c-42ed-a170-35914ef05333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.570893] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571117] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571325] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571610] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571729] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571858] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.571978] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.572117] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.572280] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2076.584929] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2076.585216] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2076.585386] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '81', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_33f496dcb85142ae9da6ddf8e8e9e7c9': '1', 'io_workload': '10', 'num_proj_90325af48fc44451a6c15e089107271a': '2', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'num_task_spawning': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2076.729590] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fbb92b-a9a0-4685-a465-0dbe8545eb01 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.737656] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a7e1d6-f2c8-4414-9c6e-0b542e10fec5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.768471] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226c2340-dc0a-47d9-9743-66da05bc130e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.775761] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058001e1-99ac-4b47-9088-40c75526ff1d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.788565] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.797739] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2076.814143] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record 
updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2076.814340] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.326s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.805256] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.805633] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2077.805633] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2077.828181] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.828411] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.828548] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.828678] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.828803] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.828926] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.829058] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.829256] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.829388] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.829507] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2077.829624] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2077.830185] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.830328] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2078.406309] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.458534] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.403654] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.404168] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.404615] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.404906] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.399443] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.534594] env[61573]: WARNING oslo_vmware.rw_handles [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2086.534594] env[61573]: ERROR oslo_vmware.rw_handles [ 2086.535352] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2086.536781] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2086.537021] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Copying Virtual Disk [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/b188676b-be93-44e4-ade6-5687992ba226/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2086.537334] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-c4345053-1713-4ac3-8a85-24d8a2a95836 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.545823] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for the task: (returnval){ [ 2086.545823] env[61573]: value = "task-4836920" [ 2086.545823] env[61573]: _type = "Task" [ 2086.545823] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.554261] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Task: {'id': task-4836920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.057257] env[61573]: DEBUG oslo_vmware.exceptions [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2087.057541] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2087.058116] env[61573]: ERROR nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.058116] env[61573]: Faults: ['InvalidArgument'] [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Traceback (most recent call last): [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] yield resources [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self.driver.spawn(context, instance, image_meta, [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
786, in spawn [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self._fetch_image_if_missing(context, vi) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] image_cache(vi, tmp_image_ds_loc) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] vm_util.copy_virtual_disk( [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] session._wait_for_task(vmdk_copy_task) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return self.wait_for_task(task_ref) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return evt.wait() [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] result = hub.switch() [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return self.greenlet.switch() [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self.f(*self.args, **self.kw) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] raise exceptions.translate_fault(task_info.error) [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Faults: ['InvalidArgument'] [ 2087.058116] env[61573]: ERROR nova.compute.manager [instance: 
c96755a9-1e1c-42ed-a170-35914ef05333] [ 2087.059357] env[61573]: INFO nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Terminating instance [ 2087.060763] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2087.060763] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2087.060763] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2dcdc033-6141-4604-89fd-913cd947b934 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.062941] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2087.063088] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2087.063837] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f9bd5a-4277-480d-bed9-bba91152d090 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.071284] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2087.071756] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23424fa4-1176-4561-8fd9-7219b377ee4a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.073893] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2087.074089] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 
tempest-ServerDiskConfigTestJSON-1776322601-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2087.075097] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4164438c-3a64-450a-8894-eabc5895696f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.080025] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2087.080025] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]522a0240-e70a-9f20-8af0-4cc3af54e534" [ 2087.080025] env[61573]: _type = "Task" [ 2087.080025] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.088078] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]522a0240-e70a-9f20-8af0-4cc3af54e534, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.145015] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2087.145316] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2087.145527] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Deleting the datastore file [datastore2] c96755a9-1e1c-42ed-a170-35914ef05333 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2087.145841] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9ca94a5-34ea-45bf-ab5d-8ee53c734650 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.152260] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for the task: (returnval){ [ 2087.152260] env[61573]: value = "task-4836922" [ 2087.152260] env[61573]: _type = "Task" [ 2087.152260] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.160723] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Task: {'id': task-4836922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.590572] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2087.590964] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2087.591104] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f292a51-1197-4972-bdd4-acffd383bb4c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.603466] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2087.603620] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Fetch image to [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2087.603774] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2087.604560] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115af5d3-6f30-429d-83ea-c1287bf6f96c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.610955] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a36b803-447c-4009-992d-eaef756735a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.619876] env[61573]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24fe6ee-8c2e-4e74-be97-d1aa562430bf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.649780] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de854003-1af6-42ac-ab84-fe05cb5f65a0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.658076] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33e5504e-7928-4d4c-a26f-55aa669f3e15 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.662422] env[61573]: DEBUG oslo_vmware.api [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Task: {'id': task-4836922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079946} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.662993] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2087.663212] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2087.663442] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2087.663636] env[61573]: INFO nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Took 0.60 seconds to destroy the instance on the hypervisor. 
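Note: the entries above show the driver submitting vCenter tasks (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) and then polling them until they report "completed successfully" or a fault. A minimal sketch of that poll-until-done pattern follows; the names and the poll() callable are illustrative assumptions, not the oslo.vmware wait_for_task implementation.

import time

class TaskFailed(Exception):
    """Raised when a vCenter task ends in an error state (illustrative)."""

def wait_for_task(poll, interval=0.5):
    # poll() is any callable returning (state, progress, payload); state is one of
    # "running", "success", "error" -- mirroring the "progress is N%" lines above.
    while True:
        state, progress, payload = poll()
        if state == "success":
            return payload
        if state == "error":
            # In the log the error payload is translated into a fault such as
            # InvalidArgument ("A specified parameter was not correct: fileType").
            raise TaskFailed(payload)
        time.sleep(interval)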
[ 2087.665977] env[61573]: DEBUG nova.compute.claims [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2087.666248] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.666568] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.682253] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2087.735602] env[61573]: DEBUG oslo_vmware.rw_handles [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2087.798486] env[61573]: DEBUG oslo_vmware.rw_handles [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2087.798676] env[61573]: DEBUG oslo_vmware.rw_handles [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2087.908186] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ec91e7-eca9-4902-a090-d0c14df9886f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.917218] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e4afc4-f6a3-4da7-9df7-60ce1931e3a1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.949416] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8c1625-4358-407b-bed1-46aff695b574 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.957325] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca786bfb-525e-4907-8f6f-0149fbfa4ae2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.970590] env[61573]: DEBUG nova.compute.provider_tree [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2087.979218] env[61573]: DEBUG nova.scheduler.client.report [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2087.994674] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.995254] env[61573]: ERROR nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.995254] env[61573]: Faults: ['InvalidArgument'] [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Traceback (most recent call last): [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self.driver.spawn(context, instance, image_meta, [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self._fetch_image_if_missing(context, vi) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] image_cache(vi, tmp_image_ds_loc) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] vm_util.copy_virtual_disk( [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] session._wait_for_task(vmdk_copy_task) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return self.wait_for_task(task_ref) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return evt.wait() [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] result = hub.switch() [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] return self.greenlet.switch() [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] self.f(*self.args, **self.kw) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: 
c96755a9-1e1c-42ed-a170-35914ef05333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] raise exceptions.translate_fault(task_info.error) [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Faults: ['InvalidArgument'] [ 2087.995254] env[61573]: ERROR nova.compute.manager [instance: c96755a9-1e1c-42ed-a170-35914ef05333] [ 2087.996298] env[61573]: DEBUG nova.compute.utils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2087.997510] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Build of instance c96755a9-1e1c-42ed-a170-35914ef05333 was re-scheduled: A specified parameter was not correct: fileType [ 2087.997510] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2087.997860] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2087.998050] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2087.998230] env[61573]: DEBUG nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2087.998447] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2088.368271] env[61573]: DEBUG nova.network.neutron [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.380839] env[61573]: INFO nova.compute.manager [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Took 0.38 seconds to deallocate network for instance. [ 2088.499277] env[61573]: INFO nova.scheduler.client.report [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Deleted allocations for instance c96755a9-1e1c-42ed-a170-35914ef05333 [ 2088.524424] env[61573]: DEBUG oslo_concurrency.lockutils [None req-624b1f90-28bc-469e-b90b-2503dde1ab05 tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 683.171s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.525754] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 486.718s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.525973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Acquiring lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.526204] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.526380] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.529711] env[61573]: INFO nova.compute.manager [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Terminating instance [ 2088.531597] env[61573]: DEBUG nova.compute.manager [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2088.531812] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2088.532102] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f020dff0-710e-4878-b416-3ede219e4354 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.542463] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454dfe3d-1fa3-49a9-987f-bd2872540b04 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.552831] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2088.577099] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c96755a9-1e1c-42ed-a170-35914ef05333 could not be found. 
[ 2088.577283] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2088.577515] env[61573]: INFO nova.compute.manager [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2088.577771] env[61573]: DEBUG oslo.service.loopingcall [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2088.578085] env[61573]: DEBUG nova.compute.manager [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2088.578160] env[61573]: DEBUG nova.network.neutron [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2088.604369] env[61573]: DEBUG nova.network.neutron [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.605975] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.606529] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.609101] env[61573]: INFO nova.compute.claims [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2088.613029] env[61573]: INFO nova.compute.manager [-] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] Took 0.03 seconds to deallocate network for instance. 
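Note: the "Claim successful" entry above, together with the inventory data repeated throughout this log, reflects the usual placement capacity check: per resource class, capacity = (total - reserved) * allocation_ratio, and a claim fits if used + requested <= capacity. The sketch below plugs in the figures from this log (48 VCPU at ratio 4.0, 196590 MB RAM with 512 reserved, 200 GB disk, and the m1.nano request of 1 VCPU / 128 MB / 1 GB); treat it as an illustration of the arithmetic, not Nova's claim code.

# Inventory as reported for provider b1eff98b-2b30-4574-a87d-d151235a2dba in this log.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inv):
    # Effective capacity per resource class: (total - reserved) * allocation_ratio.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"] for rc, v in inv.items()}

def claim_fits(used, request, inv=INVENTORY):
    cap = capacity(inv)
    return all(used.get(rc, 0) + amount <= cap[rc] for rc, amount in request.items())

# At this point in the log: 10 instances, each 1 VCPU / 128 MB / 1 GB.
used = {"VCPU": 10, "MEMORY_MB": 1280, "DISK_GB": 10}
request = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}   # m1.nano
print(claim_fits(used, request))   # True: e.g. 10 + 1 <= (48 - 0) * 4.0 = 192 VCPU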
[ 2088.745471] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1a9a9556-706d-4c23-9ea2-4b7d18673e2d tempest-InstanceActionsV221TestJSON-2122004276 tempest-InstanceActionsV221TestJSON-2122004276-project-member] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.220s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.746595] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 344.023s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.746848] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c96755a9-1e1c-42ed-a170-35914ef05333] During sync_power_state the instance has a pending task (deleting). Skip. [ 2088.747079] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "c96755a9-1e1c-42ed-a170-35914ef05333" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.808210] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adfa067-90e4-4683-b1aa-12ba086b049d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.817097] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05425307-c44d-42c2-95c4-e3887fb8ad08 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.846969] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9128a931-f276-49b6-b036-59b5ce6cbe84 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.855530] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75f69a9-bc7f-4a28-8cd6-6a9d1f6a9192 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.869467] env[61573]: DEBUG nova.compute.provider_tree [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2088.878519] env[61573]: DEBUG nova.scheduler.client.report [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 
'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2088.893266] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.893796] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2088.929608] env[61573]: DEBUG nova.compute.utils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2088.930975] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2088.931170] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2088.964574] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2089.019229] env[61573]: DEBUG nova.policy [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08c59199cd604147a2f0a2cd0dc95773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e08d442d2b554ce6bd9e2cc031cf6735', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2089.049858] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2089.076067] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2089.076454] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2089.076632] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2089.076836] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2089.076986] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2089.077166] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2089.077378] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2089.077584] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2089.077770] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 
tempest-ServersTestJSON-411708961-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2089.077943] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2089.078142] env[61573]: DEBUG nova.virt.hardware [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2089.079070] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83732aa-3b24-49ea-9798-417d896522f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.087789] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd17a57b-f919-4817-9511-da79559efce2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.340376] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Successfully created port: acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2090.446569] env[61573]: DEBUG nova.compute.manager [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Received event network-vif-plugged-acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2090.446994] env[61573]: DEBUG oslo_concurrency.lockutils [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] Acquiring lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.447579] env[61573]: DEBUG oslo_concurrency.lockutils [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] Lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.447839] env[61573]: DEBUG oslo_concurrency.lockutils [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] Lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.448122] env[61573]: DEBUG nova.compute.manager [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] No 
waiting events found dispatching network-vif-plugged-acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2090.448418] env[61573]: WARNING nova.compute.manager [req-6a5e7e24-e728-4527-8749-cc945b72657e req-f605469f-13a1-40b1-98ec-cbf598fc373c service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Received unexpected event network-vif-plugged-acedfa66-5d43-41b9-a612-8582a0cb56ac for instance with vm_state building and task_state spawning. [ 2090.479543] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Successfully updated port: acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2090.495979] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.496269] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.496523] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2090.563378] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2090.833537] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Updating instance_info_cache with network_info: [{"id": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "address": "fa:16:3e:58:e2:a0", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacedfa66-5d", "ovs_interfaceid": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.845886] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.846223] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance network_info: |[{"id": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "address": "fa:16:3e:58:e2:a0", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacedfa66-5d", "ovs_interfaceid": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2090.846712] env[61573]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:e2:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acedfa66-5d43-41b9-a612-8582a0cb56ac', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2090.854413] env[61573]: DEBUG oslo.service.loopingcall [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2090.854993] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2090.855325] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98e1d119-4ec6-45a1-9d27-c359b378b77d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.877357] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2090.877357] env[61573]: value = "task-4836923" [ 2090.877357] env[61573]: _type = "Task" [ 2090.877357] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.887894] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836923, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.389230] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836923, 'name': CreateVM_Task, 'duration_secs': 0.363799} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.389423] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2091.390128] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.390300] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.390631] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2091.390887] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa1e8189-6bc4-49d8-baed-0c84c4e68643 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.397017] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2091.397017] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a57cac-0ddf-627d-8dd5-5a9d4d1949c6" [ 2091.397017] env[61573]: _type = "Task" [ 2091.397017] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.406048] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52a57cac-0ddf-627d-8dd5-5a9d4d1949c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.909097] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.909430] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2091.909651] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.475392] env[61573]: DEBUG nova.compute.manager [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Received event network-changed-acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2092.475677] env[61573]: DEBUG nova.compute.manager [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Refreshing instance network info cache due to event network-changed-acedfa66-5d43-41b9-a612-8582a0cb56ac. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2092.475917] env[61573]: DEBUG oslo_concurrency.lockutils [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] Acquiring lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.476086] env[61573]: DEBUG oslo_concurrency.lockutils [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] Acquired lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.476272] env[61573]: DEBUG nova.network.neutron [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Refreshing network info cache for port acedfa66-5d43-41b9-a612-8582a0cb56ac {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2092.771445] env[61573]: DEBUG nova.network.neutron [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Updated VIF entry in instance network info cache for port acedfa66-5d43-41b9-a612-8582a0cb56ac. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2092.771818] env[61573]: DEBUG nova.network.neutron [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Updating instance_info_cache with network_info: [{"id": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "address": "fa:16:3e:58:e2:a0", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacedfa66-5d", "ovs_interfaceid": "acedfa66-5d43-41b9-a612-8582a0cb56ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.783722] env[61573]: DEBUG oslo_concurrency.lockutils [req-49e56e09-d05c-4971-8819-153a8d218f54 req-0e6f527f-ac46-492e-8ffd-639a3de24e9f service nova] Releasing lock "refresh_cache-d892e1ae-e434-40b2-b86a-99c55d473363" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.984325] env[61573]: WARNING oslo_vmware.rw_handles [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2136.984325] env[61573]: ERROR oslo_vmware.rw_handles [ 2136.985213] env[61573]: DEBUG nova.virt.vmwareapi.images [None 
req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2136.987050] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2136.987302] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Copying Virtual Disk [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/abb56278-a1c4-48bf-b190-e58889cc5c98/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2136.987609] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d404d90b-83d7-47f8-be68-18ad6f92e320 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.996680] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2136.996680] env[61573]: value = "task-4836924" [ 2136.996680] env[61573]: _type = "Task" [ 2136.996680] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.005707] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836924, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.403791] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.417692] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2137.417930] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.418120] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.418274] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2137.419384] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c1a9d8-0822-4ebd-ae51-b38b3dc71cc0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.428220] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f52e355-2ec9-4146-ae26-6538499a0696 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.442260] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8ba19a-6a74-43b0-b561-b22c502950b4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.449150] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b8ae07-719e-45d2-b616-02e1ee631af0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.479639] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180536MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2137.479867] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2137.480095] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.507027] env[61573]: DEBUG oslo_vmware.exceptions [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2137.507327] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.508061] env[61573]: ERROR nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2137.508061] env[61573]: Faults: ['InvalidArgument'] [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Traceback (most recent call last): [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] yield resources [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self.driver.spawn(context, instance, image_meta, [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self._fetch_image_if_missing(context, vi) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] image_cache(vi, tmp_image_ds_loc) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] vm_util.copy_virtual_disk( [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] session._wait_for_task(vmdk_copy_task) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return self.wait_for_task(task_ref) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return evt.wait() [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] result = hub.switch() [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return self.greenlet.switch() [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self.f(*self.args, **self.kw) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] raise exceptions.translate_fault(task_info.error) [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Faults: ['InvalidArgument'] [ 2137.508061] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] [ 2137.508906] env[61573]: INFO nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Terminating instance [ 2137.509859] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2137.510078] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2137.511030] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ccfce00-3570-48f5-bb59-a22866081544 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.512783] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2137.513010] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2137.514011] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8caa99-710f-4f33-badd-23ca8c8a66d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.527651] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2137.527936] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2137.528113] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2137.528985] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-636423d6-37e2-43d3-bf3b-ac8b62095afe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.530564] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69be8914-2f17-4553-a94b-560a229a5bf6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.539230] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2137.539230] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52227b6a-802e-8416-6939-e86152b6df5e" [ 2137.539230] env[61573]: _type = "Task" [ 2137.539230] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.547944] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52227b6a-802e-8416-6939-e86152b6df5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.558180] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.558345] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.558472] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.558592] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.558726] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.558878] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.559052] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.559140] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.559244] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.559356] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2137.559551] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2137.559702] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '82', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_90325af48fc44451a6c15e089107271a': '2', 'io_workload': '10', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '2', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'num_task_spawning': '2'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2137.604658] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2137.604912] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2137.605124] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleting the datastore file [datastore2] 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2137.605577] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38857568-a448-4c75-81b5-72592c23784c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.612416] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2137.612416] env[61573]: value = "task-4836926" [ 2137.612416] env[61573]: _type = "Task" [ 2137.612416] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.623800] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836926, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.705331] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b129877c-9d66-441c-8cf6-a82b1edf7421 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.713469] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2d9627-af65-442a-9033-6face6acfca6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.744022] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9827d0e-9476-4136-b7d9-e827c20b5a26 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.751725] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d714c3e-e720-481a-b0f8-a9d9c1f1240b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.765381] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2137.774116] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2137.787883] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2137.788042] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.308s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.050549] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2138.050913] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2138.051054] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c87be95-e80e-4e69-b4a2-a92547fcfae4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.062750] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2138.062969] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Fetch image to [datastore2] vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2138.063150] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2138.063882] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b10167-43ed-4d33-b780-7f586e4fc3f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.070665] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44af5480-27e3-47cf-9697-5e8c3faa8fc5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.079927] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb63b49-f5ff-4044-a915-fa81c3218a40 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.110580] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bda420-222c-4b84-bb5a-3708f4486501 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.122623] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-03c8eab7-5e08-4b60-9718-66b8c69c9676 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.124398] env[61573]: DEBUG oslo_vmware.api [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088395} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.124640] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2138.124823] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2138.125113] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2138.125250] env[61573]: INFO nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2138.127366] env[61573]: DEBUG nova.compute.claims [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2138.127556] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.127779] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.147233] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2138.206579] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2138.268430] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2138.268700] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2138.358052] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b830eb6-0f8f-405b-874d-9af02690ded9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.365306] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac29405-4e75-44e0-8988-f491a2732fd0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.394859] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9625b06-6a2b-47c4-b48f-da9dc6b052ab {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.402103] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72310b4-b50a-4fcd-81ca-5a6cfba69c91 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.415518] env[61573]: DEBUG nova.compute.provider_tree [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2138.425958] env[61573]: DEBUG nova.scheduler.client.report [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2138.439896] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=61573) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.440451] env[61573]: ERROR nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2138.440451] env[61573]: Faults: ['InvalidArgument'] [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Traceback (most recent call last): [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self.driver.spawn(context, instance, image_meta, [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self._fetch_image_if_missing(context, vi) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] image_cache(vi, tmp_image_ds_loc) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] vm_util.copy_virtual_disk( [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] session._wait_for_task(vmdk_copy_task) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return self.wait_for_task(task_ref) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return evt.wait() [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2138.440451] env[61573]: ERROR nova.compute.manager 
[instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] result = hub.switch() [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] return self.greenlet.switch() [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] self.f(*self.args, **self.kw) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] raise exceptions.translate_fault(task_info.error) [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Faults: ['InvalidArgument'] [ 2138.440451] env[61573]: ERROR nova.compute.manager [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] [ 2138.441352] env[61573]: DEBUG nova.compute.utils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2138.442576] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Build of instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f was re-scheduled: A specified parameter was not correct: fileType [ 2138.442576] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2138.442979] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2138.443220] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2138.443407] env[61573]: DEBUG nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2138.443572] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2138.788497] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.788723] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2138.790224] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2138.811262] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811262] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811423] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811719] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811719] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811866] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811866] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.811964] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.812239] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2138.812392] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2138.812927] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.813134] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.813269] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2138.842384] env[61573]: DEBUG nova.network.neutron [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2138.857504] env[61573]: INFO nova.compute.manager [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Took 0.41 seconds to deallocate network for instance. 
[ 2138.976983] env[61573]: INFO nova.scheduler.client.report [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted allocations for instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f [ 2139.002919] env[61573]: DEBUG oslo_concurrency.lockutils [None req-244d5f9c-6e62-42d6-bed9-3bda781f76c0 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 534.365s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.003230] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 394.279s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.003422] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] During sync_power_state the instance has a pending task (spawning). Skip. [ 2139.003596] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.004106] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 338.819s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.004330] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.004538] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.004700] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.007800] env[61573]: INFO nova.compute.manager [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Terminating instance [ 2139.009681] env[61573]: DEBUG nova.compute.manager [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2139.010138] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2139.010429] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-590bde61-3602-4cd5-8062-5c6c3d3441e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.020586] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bff96e-a2ea-4632-830c-64110999c1d4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.051104] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f could not be found. [ 2139.051403] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2139.051500] env[61573]: INFO nova.compute.manager [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2139.051747] env[61573]: DEBUG oslo.service.loopingcall [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2139.051980] env[61573]: DEBUG nova.compute.manager [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2139.052090] env[61573]: DEBUG nova.network.neutron [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2139.083353] env[61573]: DEBUG nova.network.neutron [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2139.091595] env[61573]: INFO nova.compute.manager [-] [instance: 6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f] Took 0.04 seconds to deallocate network for instance. [ 2139.182976] env[61573]: DEBUG oslo_concurrency.lockutils [None req-8bca167f-8737-4a88-a040-c74af832478e tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "6cdf32c0-f8b8-42c7-902d-c5ca8ea3861f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.403986] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.404079] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.404184] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.404184] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.400948] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2168.109484] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.109820] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 
tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.122568] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2168.180454] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.180702] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.182288] env[61573]: INFO nova.compute.claims [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2168.373828] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1aff5e-87eb-4b73-a423-c9174858de80 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.382113] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693883d8-7ff5-4742-ba13-4cf9227ffb93 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.411414] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357757b6-c5b6-495e-b1c4-f2563e4971f6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.418952] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43c1367-6eab-482a-a7d1-603c2b64b9d2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.432361] env[61573]: DEBUG nova.compute.provider_tree [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.442117] env[61573]: DEBUG nova.scheduler.client.report [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] 
Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2168.457301] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.457929] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2168.491502] env[61573]: DEBUG nova.compute.utils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2168.492970] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2168.493170] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2168.504165] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2168.573014] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2168.631798] env[61573]: DEBUG nova.policy [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2bff1f6e1fa459eac4ae3807485e8fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f5bf0addc41483c9bad3c1497811f08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2168.645726] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2168.645983] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2168.646158] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2168.646367] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2168.646547] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2168.646702] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2168.646909] env[61573]: 
DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2168.647081] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2168.647251] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2168.647415] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2168.647590] env[61573]: DEBUG nova.virt.hardware [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2168.648511] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8a494e-2a8b-4193-a35a-2971cabbe83e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.657304] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a45072-528e-410d-afc9-370863fc1461 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.071802] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Successfully created port: 83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2169.723460] env[61573]: DEBUG nova.compute.manager [req-e1eba313-f10c-4641-9903-f8c38232a6d1 req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Received event network-vif-plugged-83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2169.723738] env[61573]: DEBUG oslo_concurrency.lockutils [req-e1eba313-f10c-4641-9903-f8c38232a6d1 req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] Acquiring lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.723943] env[61573]: DEBUG oslo_concurrency.lockutils [req-e1eba313-f10c-4641-9903-f8c38232a6d1 
req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.724129] env[61573]: DEBUG oslo_concurrency.lockutils [req-e1eba313-f10c-4641-9903-f8c38232a6d1 req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.724299] env[61573]: DEBUG nova.compute.manager [req-e1eba313-f10c-4641-9903-f8c38232a6d1 req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] No waiting events found dispatching network-vif-plugged-83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2169.724491] env[61573]: WARNING nova.compute.manager [req-e1eba313-f10c-4641-9903-f8c38232a6d1 req-003c6b59-6013-4b2e-adcd-55ee44915583 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Received unexpected event network-vif-plugged-83a231e6-48f0-4be7-bc7e-f02a1892b6f4 for instance with vm_state building and task_state spawning. [ 2169.779069] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Successfully updated port: 83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2169.795050] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.795493] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquired lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.795493] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2169.864624] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2170.076568] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Updating instance_info_cache with network_info: [{"id": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "address": "fa:16:3e:15:d0:2c", "network": {"id": "4ef7f13a-fe85-4d7f-8736-9aaf7b0a7a5c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1204597873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f5bf0addc41483c9bad3c1497811f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a231e6-48", "ovs_interfaceid": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.089227] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Releasing lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.089516] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance network_info: |[{"id": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "address": "fa:16:3e:15:d0:2c", "network": {"id": "4ef7f13a-fe85-4d7f-8736-9aaf7b0a7a5c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1204597873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f5bf0addc41483c9bad3c1497811f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a231e6-48", "ovs_interfaceid": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2170.089915] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:d0:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83a231e6-48f0-4be7-bc7e-f02a1892b6f4', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2170.098157] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Creating folder: Project (9f5bf0addc41483c9bad3c1497811f08). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2170.099071] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ad470d3-0244-4681-b399-57efd7111750 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.110021] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Created folder: Project (9f5bf0addc41483c9bad3c1497811f08) in parent group-v942801. [ 2170.110223] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Creating folder: Instances. Parent ref: group-v942913. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2170.110458] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1acece44-92fc-4b41-abd1-70139e797f28 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.119890] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Created folder: Instances in parent group-v942913. [ 2170.120133] env[61573]: DEBUG oslo.service.loopingcall [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2170.120274] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2170.120484] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15c77440-7fc4-4712-8f4c-32ea84e6d878 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.141067] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2170.141067] env[61573]: value = "task-4836929" [ 2170.141067] env[61573]: _type = "Task" [ 2170.141067] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.149266] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836929, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.652071] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836929, 'name': CreateVM_Task, 'duration_secs': 0.300349} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.652071] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2170.652290] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.652472] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.652831] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2170.653090] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c060fec1-7e90-42de-9f14-9adb08d0e7f8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.657878] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for the task: (returnval){ [ 2170.657878] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b0850c-9ab9-4a55-68de-3a0f2093f2db" [ 2170.657878] env[61573]: _type = "Task" [ 2170.657878] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.665667] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52b0850c-9ab9-4a55-68de-3a0f2093f2db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.168619] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.168964] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2171.169036] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.750452] env[61573]: DEBUG nova.compute.manager [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Received event network-changed-83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2171.750686] env[61573]: DEBUG nova.compute.manager [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Refreshing instance network info cache due to event network-changed-83a231e6-48f0-4be7-bc7e-f02a1892b6f4. 
{{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2171.750914] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] Acquiring lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.751084] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] Acquired lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.751262] env[61573]: DEBUG nova.network.neutron [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Refreshing network info cache for port 83a231e6-48f0-4be7-bc7e-f02a1892b6f4 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2172.040416] env[61573]: DEBUG nova.network.neutron [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Updated VIF entry in instance network info cache for port 83a231e6-48f0-4be7-bc7e-f02a1892b6f4. {{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2172.040775] env[61573]: DEBUG nova.network.neutron [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Updating instance_info_cache with network_info: [{"id": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "address": "fa:16:3e:15:d0:2c", "network": {"id": "4ef7f13a-fe85-4d7f-8736-9aaf7b0a7a5c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1204597873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f5bf0addc41483c9bad3c1497811f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a231e6-48", "ovs_interfaceid": "83a231e6-48f0-4be7-bc7e-f02a1892b6f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.056183] env[61573]: DEBUG oslo_concurrency.lockutils [req-d5ea3dc1-19ce-4b66-9b9a-e1ddef564e92 req-e3909618-aea6-402f-881f-912946709615 service nova] Releasing lock "refresh_cache-427e30fb-7af2-4ecc-934a-bb2b8d2cb320" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.401693] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] 
Acquiring lock "07210470-d769-43e0-8d38-b076c374d203" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.402034] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.896993] env[61573]: WARNING oslo_vmware.rw_handles [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2187.896993] env[61573]: ERROR oslo_vmware.rw_handles [ 2187.897582] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2187.899412] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2187.899658] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Copying Virtual Disk [datastore2] vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] 
vmware_temp/e1dcc48b-a788-4cd4-8f8d-8ecd4ba09e9b/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2187.899955] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77c54d49-c069-4715-ab8e-4299804323fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.908515] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2187.908515] env[61573]: value = "task-4836930" [ 2187.908515] env[61573]: _type = "Task" [ 2187.908515] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.917034] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.420027] env[61573]: DEBUG oslo_vmware.exceptions [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2188.420027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2188.420027] env[61573]: ERROR nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2188.420027] env[61573]: Faults: ['InvalidArgument'] [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Traceback (most recent call last): [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] yield resources [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self.driver.spawn(context, instance, image_meta, [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self._vmops.spawn(context, 
instance, image_meta, injected_files, [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self._fetch_image_if_missing(context, vi) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] image_cache(vi, tmp_image_ds_loc) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] vm_util.copy_virtual_disk( [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] session._wait_for_task(vmdk_copy_task) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return self.wait_for_task(task_ref) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return evt.wait() [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] result = hub.switch() [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return self.greenlet.switch() [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self.f(*self.args, **self.kw) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] raise exceptions.translate_fault(task_info.error) [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 
2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Faults: ['InvalidArgument'] [ 2188.420027] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] [ 2188.421070] env[61573]: INFO nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Terminating instance [ 2188.421905] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2188.422135] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2188.422380] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f53ddf80-6e4e-4340-8cc4-aa66cf17c855 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.424616] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2188.424840] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2188.425599] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca42b49c-cd98-496c-b29d-99299a316ddf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.432923] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2188.433177] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfe81412-eebf-447a-9f28-f7bd2df49611 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.435559] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2188.435764] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2188.436754] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfcbd636-0792-4991-87d8-f5cd02b24f99 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.441933] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2188.441933] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]520cd300-def7-c796-29e7-824e9974089f" [ 2188.441933] env[61573]: _type = "Task" [ 2188.441933] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.449620] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]520cd300-def7-c796-29e7-824e9974089f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.502425] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2188.502719] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2188.503059] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleting the datastore file [datastore2] 04d29fd0-acd8-407a-8b53-341c78e7b341 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2188.503427] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f37f8d1f-1675-44b9-a1a1-dbb4e2a99c55 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.510572] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2188.510572] env[61573]: value = "task-4836932" [ 2188.510572] env[61573]: _type = "Task" [ 2188.510572] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.519112] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.951911] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2188.952265] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2188.952523] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cda6196d-1c1c-4724-96b0-03c0db740f65 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.965112] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2188.965324] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Fetch image to [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2188.965488] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2188.966404] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17b09b6-475c-4505-9540-48e63dcd81ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.975031] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3687940e-8413-411d-ba46-6acbf135fede {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.984850] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dd3ce7-979b-4ddc-ab6b-452dae19aa29 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.019128] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6742b491-3fa1-499b-bc72-4dca7f4b718a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.028127] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fa67b828-154c-4d94-934d-341480252d3c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.029828] env[61573]: DEBUG oslo_vmware.api [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069998} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.030084] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2189.030267] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2189.030435] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2189.030605] env[61573]: INFO nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Took 0.61 seconds to destroy the instance on the hypervisor. 
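The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same lifecycle: a vCenter task is invoked, then polled until it either completes ("completed successfully", with a duration_secs field) or ends in a fault that is surfaced as a VimFaultException, as in the fileType/InvalidArgument traceback. Below is a minimal, self-contained sketch of that polling pattern; TaskInfo, TaskFault, wait_for_task and the 0.5s interval are illustrative stand-ins, not the oslo.vmware implementation.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str
    name: str
    state: str = "running"   # "running" -> "success" or "error"
    progress: int = 0
    error: str | None = None

class TaskFault(Exception):
    """Raised when the remote task ends in an error state (cf. VimFaultException)."""

def wait_for_task(poll, task_id, name, interval=0.5):
    """Poll `poll(task_id)` until the task succeeds or fails, like _poll_task above."""
    start = time.monotonic()
    while True:
        info = poll(task_id)
        if info.state == "success":
            print(f"Task: {{'id': {task_id!r}, 'name': {name!r}, "
                  f"'duration_secs': {time.monotonic() - start:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)" in the traceback.
            raise TaskFault(info.error)
        print(f"Task: {{'id': {task_id!r}, 'name': {name!r}}} progress is {info.progress}%.")
        time.sleep(interval)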
[ 2189.032747] env[61573]: DEBUG nova.compute.claims [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2189.032952] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.033190] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.054688] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2189.110062] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2189.169992] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2189.170316] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2189.282737] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5a873e-ddf3-4fea-8658-e2f3ed5439ce {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.291221] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23015cd7-3450-4613-b3a5-6f1c00b2ae54 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.322594] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991d045c-b11d-487a-8053-7cde8db0dc50 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.330909] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e485d6-35cf-4c75-9e20-a7154953d8f1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.344661] env[61573]: DEBUG nova.compute.provider_tree [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.354304] env[61573]: DEBUG nova.scheduler.client.report [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2189.370358] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.370920] env[61573]: ERROR nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2189.370920] env[61573]: Faults: ['InvalidArgument'] [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Traceback (most recent call last): [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 
04d29fd0-acd8-407a-8b53-341c78e7b341] self.driver.spawn(context, instance, image_meta, [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self._fetch_image_if_missing(context, vi) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] image_cache(vi, tmp_image_ds_loc) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] vm_util.copy_virtual_disk( [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] session._wait_for_task(vmdk_copy_task) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return self.wait_for_task(task_ref) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return evt.wait() [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] result = hub.switch() [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] return self.greenlet.switch() [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] self.f(*self.args, **self.kw) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] raise exceptions.translate_fault(task_info.error) [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Faults: ['InvalidArgument'] [ 2189.370920] env[61573]: ERROR nova.compute.manager [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] [ 2189.371739] env[61573]: DEBUG nova.compute.utils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2189.373403] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Build of instance 04d29fd0-acd8-407a-8b53-341c78e7b341 was re-scheduled: A specified parameter was not correct: fileType [ 2189.373403] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2189.373947] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2189.374165] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2189.374348] env[61573]: DEBUG nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2189.374516] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2189.739454] env[61573]: DEBUG nova.network.neutron [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.752188] env[61573]: INFO nova.compute.manager [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Took 0.38 seconds to deallocate network for instance. [ 2189.846332] env[61573]: INFO nova.scheduler.client.report [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted allocations for instance 04d29fd0-acd8-407a-8b53-341c78e7b341 [ 2189.869728] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aa271fd7-3217-4e5c-b1aa-f28eecdaa163 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.814s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.870917] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 445.147s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.871126] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] During sync_power_state the instance has a pending task (spawning). Skip. 
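The lockutils entries in this stretch record two durations per named lock: how long the caller waited to acquire it and how long it was held before release (the 04d29fd0... instance lock was held for 568.814s across the whole failed build, while the power-state sync waited 445.147s for it). A standard-library-only sketch of that waited/held accounting, not the oslo.concurrency implementation, with hypothetical names:

import contextlib
import threading
import time

_locks: dict[str, threading.Lock] = {}

@contextlib.contextmanager
def timed_lock(name: str, owner: str):
    # One process-local lock object per name; real deployments may also use file-based
    # external locks, which this sketch does not attempt to model.
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - t1:.3f}s')

# Usage, e.g.:
#   with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
#       ...  # critical section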
[ 2189.871315] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.872199] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 373.141s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.872458] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.872675] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.873204] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.875405] env[61573]: INFO nova.compute.manager [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Terminating instance [ 2189.877265] env[61573]: DEBUG nova.compute.manager [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2189.877459] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2189.877716] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f58e184-4a68-44dc-a1ae-4ce600bc281a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.888680] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae213f96-fe90-42dd-b282-62dfe06d1c3e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.900187] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2189.923767] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04d29fd0-acd8-407a-8b53-341c78e7b341 could not be found. [ 2189.924015] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2189.924211] env[61573]: INFO nova.compute.manager [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2189.924464] env[61573]: DEBUG oslo.service.loopingcall [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2189.924699] env[61573]: DEBUG nova.compute.manager [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2189.924794] env[61573]: DEBUG nova.network.neutron [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2189.951555] env[61573]: DEBUG nova.network.neutron [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.962613] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.962927] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.964537] env[61573]: INFO nova.compute.claims [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2189.967111] env[61573]: INFO nova.compute.manager [-] [instance: 04d29fd0-acd8-407a-8b53-341c78e7b341] Took 0.04 seconds to deallocate network for instance. 
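The "Inventory has not changed for provider ... based on inventory data: {...}" lines above come from comparing the provider's current inventory against the freshly computed one and skipping the placement update when they match. A rough sketch of that comparison, using the exact inventory data from the log; inventory_changed is a hypothetical helper, not the nova.scheduler.client.report code.

def inventory_changed(current: dict, proposed: dict) -> bool:
    """True if any resource class, or any field of its inventory record, differs."""
    if current.keys() != proposed.keys():
        return True
    return any(current[rc] != proposed[rc] for rc in proposed)

proposed = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 96,
                "step_size": 1, "allocation_ratio": 1.0},
}

if not inventory_changed(proposed, proposed):
    print("Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba")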
[ 2190.083678] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1e17b3c7-dafb-40bd-b28e-cf3904ad704e tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "04d29fd0-acd8-407a-8b53-341c78e7b341" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.212s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.155906] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dfc2d0-55cf-429a-941d-49c72a0f5f1d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.164789] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f9fe43-d5db-4849-a686-58f02c778420 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.194886] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ea3955-8dad-44c1-a87c-6b6ebeaf8b20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.202900] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178b8b08-13ed-4324-b3b5-5d12d79bb17f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.217049] env[61573]: DEBUG nova.compute.provider_tree [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2190.226027] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2190.239985] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.240471] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Start building networks asynchronously for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2190.273303] env[61573]: DEBUG nova.compute.utils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2190.274725] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Not allocating networking since 'none' was specified. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2190.284087] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2190.348639] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2190.375328] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2190.375579] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2190.375801] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2190.375995] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2190.376160] 
env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2190.376308] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2190.376511] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2190.376692] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2190.376869] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2190.377051] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2190.377235] env[61573]: DEBUG nova.virt.hardware [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2190.378114] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb92d86-f116-4201-a63f-873b60660f14 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.386354] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be541ea-0edd-4a9b-9917-5da186bc96dd {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.399939] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance VIF info [] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2190.405334] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] 
Creating folder: Project (f5656651931541f9b48c3e185a46a113). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2190.405628] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1daae0b-33ea-4283-9c77-c60d53c4918d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.415939] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Created folder: Project (f5656651931541f9b48c3e185a46a113) in parent group-v942801. [ 2190.416142] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Creating folder: Instances. Parent ref: group-v942916. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2190.416374] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3b390f9-fa8b-4061-8ca9-93f45ce7ccdb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.426190] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Created folder: Instances in parent group-v942916. [ 2190.426424] env[61573]: DEBUG oslo.service.loopingcall [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2190.426636] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2190.426843] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b79bb148-2a12-4e6d-8c95-bd7c46996ce5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.444081] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2190.444081] env[61573]: value = "task-4836935" [ 2190.444081] env[61573]: _type = "Task" [ 2190.444081] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.451901] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836935, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.955404] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836935, 'name': CreateVM_Task, 'duration_secs': 0.265118} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.955599] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2190.956082] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2190.956313] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.956654] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2190.956923] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949ebcbf-e965-40e3-a144-2e919a7ae618 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.962096] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for the task: (returnval){ [ 2190.962096] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5245b703-2581-7b82-a6d1-392487d6200c" [ 2190.962096] env[61573]: _type = "Task" [ 2190.962096] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.970922] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5245b703-2581-7b82-a6d1-392487d6200c, 'name': SearchDatastore_Task} progress is 0%. 
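[annotation] The CreateVM_Task and SearchDatastore_Task entries above follow oslo_vmware's wait-for-task pattern: the driver issues the vSphere call, gets back a Task reference, and then polls its state until it reaches success or error. The sketch below only illustrates that polling loop; `get_task_info` and the state strings are hypothetical stand-ins for the PropertyCollector reads oslo_vmware actually performs, not its implementation.

```python
import time

# Illustrative sketch of the wait_for_task/_poll_task pattern seen in the log.
# get_task_info is a hypothetical callable returning an object with
# .state ("running"/"success"/"error"), .progress and .error attributes.
def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere task until it succeeds, or raise its recorded fault."""
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # oslo_vmware translates the vim fault and raises it at this point
            raise RuntimeError(info.error)
        print(f"task progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```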
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.473215] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.473487] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2191.473916] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2192.992664] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "2aa8f536-49ba-43f8-8f36-5741e300652a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.403563] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.403902] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2199.403679] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.403954] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.422350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.422350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.422350] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.422350] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2199.423491] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698a9c29-3d0c-4d44-a5fd-752855dc929e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.434459] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c208cd-f808-4377-904a-01803d2b3212 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.454406] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4bbb6c-d41a-4bcd-b3d2-c20ded8ce872 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.462228] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153f8a3a-823b-46f7-aeeb-cc0daca5ebd4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.497268] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180553MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2199.497426] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.497629] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.581302] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance cef978e5-e61d-4188-a58e-1b5690731c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.581469] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.581598] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.581737] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.581818] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.581943] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.582241] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.582241] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.582373] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.582440] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2199.582636] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2199.582907] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '84', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '2', 'io_workload': '10', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_task_spawning': '3', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2199.727368] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2ef2af-ed51-4e0c-b9c1-58064007fd65 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.735424] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c40ee1-7dd4-4a7a-bdc4-02e185887250 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.766925] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172fcb9c-50b0-4107-b9fe-240df9e7464e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.775024] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cc2060d5-e9bf-476a-932a-375b2ab2a7cb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.789500] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2199.799544] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2199.813721] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2199.814028] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.316s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.816069] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2200.816069] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2200.816069] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2200.837263] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.837414] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.837540] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. 
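[annotation] The inventory record reported above determines schedulable capacity as (total - reserved) * allocation_ratio for each resource class; a quick check with the logged numbers (the dict below just restates values from the log):

```python
# Capacity implied by the inventory record logged above:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```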
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.837661] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.837778] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.837895] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.838267] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.838414] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.838532] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.838648] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2200.838763] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
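[annotation] The _heal_instance_info_cache pass above walks the host's instances and skips any still in the Building state, ending with "Didn't find any instances for network info cache update." A rough sketch of that shape using the oslo_service periodic task decorator; the lookup/refresh callables and the 60-second spacing are assumptions for illustration, not Nova's actual code.

```python
from oslo_service import periodic_task


class InfoCacheHealer(periodic_task.PeriodicTasks):
    """Illustrative periodic task; instance helpers are hypothetical."""

    def __init__(self, conf, list_instances, refresh_network_cache):
        super().__init__(conf)
        self._list_instances = list_instances                  # assumed callable
        self._refresh_network_cache = refresh_network_cache    # assumed callable

    @periodic_task.periodic_task(spacing=60)
    def heal_instance_info_cache(self, context):
        healed = 0
        for instance in self._list_instances(context):
            if instance.vm_state == "building":
                # matches the "Skipping network cache update ... Building" entries
                continue
            self._refresh_network_cache(context, instance)
            healed += 1
        if not healed:
            print("Didn't find any instances for network info cache update.")
```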
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2200.839290] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.367784] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "220b6e63-be92-4ac1-9561-f2967b361eee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.368021] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "220b6e63-be92-4ac1-9561-f2967b361eee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.424381] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.404407] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.400125] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.403783] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.403720] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.103696] env[61573]: DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "d892e1ae-e434-40b2-b86a-99c55d473363" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.035533] env[61573]: WARNING oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.035533] env[61573]: ERROR oslo_vmware.rw_handles [ 2235.036181] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2235.038119] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2235.038400] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Copying Virtual Disk [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/980292e1-fe5b-40ce-8d0a-efaf2de418f9/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2235.038733] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-079a170c-001d-48f1-9772-c04aafc81f0a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.047748] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2235.047748] env[61573]: value = "task-4836936" [ 2235.047748] env[61573]: _type = "Task" [ 2235.047748] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.056267] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.558364] env[61573]: DEBUG oslo_vmware.exceptions [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2235.558604] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.559193] env[61573]: ERROR nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.559193] env[61573]: Faults: ['InvalidArgument'] [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Traceback (most recent call last): [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] yield resources [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self.driver.spawn(context, instance, image_meta, [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self._fetch_image_if_missing(context, vi) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] image_cache(vi, tmp_image_ds_loc) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: 
cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] vm_util.copy_virtual_disk( [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] session._wait_for_task(vmdk_copy_task) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return self.wait_for_task(task_ref) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return evt.wait() [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] result = hub.switch() [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return self.greenlet.switch() [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self.f(*self.args, **self.kw) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] raise exceptions.translate_fault(task_info.error) [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Faults: ['InvalidArgument'] [ 2235.559193] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] [ 2235.560224] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Terminating instance [ 2235.561227] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2235.561437] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.561685] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7477c6b2-2c49-460f-b554-e9312c395764 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.563973] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2235.564221] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2235.564997] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c3ef15-b44b-4e8b-88ec-d9f50877a1b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.572442] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2235.572685] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-981bd529-ae27-4542-828f-450bd44f4bf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.575195] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2235.575382] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2235.576451] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d145a563-eae1-4f8b-9ab2-84c558afd7ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.581799] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2235.581799] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c0b652-9707-cda6-a3fd-aec6c6554f6b" [ 2235.581799] env[61573]: _type = "Task" [ 2235.581799] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.590129] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52c0b652-9707-cda6-a3fd-aec6c6554f6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.644015] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2235.644289] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2235.644463] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleting the datastore file [datastore2] cef978e5-e61d-4188-a58e-1b5690731c1b {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2235.644747] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cc00d72-d71f-4242-ba4f-1066cda2fddc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.651490] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2235.651490] env[61573]: value = "task-4836938" [ 2235.651490] env[61573]: _type = "Task" [ 2235.651490] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.659892] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836938, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.092046] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2236.092400] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating directory with path [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2236.092400] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4b035ea-7914-4d49-9243-0465750443ba {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.105180] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Created directory with path [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2236.105401] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Fetch image to [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2236.105573] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2236.106406] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5df08d-9e51-4e60-8518-e622bb48c094 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.113679] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e068c2-36af-46d5-a50a-4fb29f224e29 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.124444] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162114d1-7a6a-420f-849d-4f00a7c39b1f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.160606] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aef77442-35f9-471a-a3cc-d8b420b40617 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.170398] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d08f41f-5004-40f0-988e-3da91c00ded2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.172396] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081005} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.172703] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2236.172892] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2236.173076] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2236.173257] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Took 0.61 seconds to destroy the instance on the hypervisor. 
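[annotation] The "Acquiring lock ... / acquired ... / released" lines around "compute_resources" and the datastore image cache come from oslo_concurrency's in-process locks. The same pattern in miniature; the lock names and function below are only examples, not Nova's resource tracker code.

```python
from oslo_concurrency import lockutils

# Decorator form: serializes callers on the named lock and emits the
# Acquiring/acquired/released DEBUG lines seen in the log.
@lockutils.synchronized("compute_resources")
def abort_instance_claim(instance_uuid):
    print(f"rolling back claim for {instance_uuid}")

# Context-manager form, as used around the image-cache lock entries above.
with lockutils.lock("[datastore2] devstack-image-cache_base/example"):
    pass  # critical section
```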
[ 2236.175541] env[61573]: DEBUG nova.compute.claims [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2236.175744] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.175983] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.194854] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2236.252978] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2236.313906] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2236.314656] env[61573]: DEBUG oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
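[annotation] The fetch above streams the sparse VMDK straight onto the datastore: rw_handles opens an HTTPS write connection to the /folder URL shown in the log (with dcPath and dsName query parameters) and pushes the image bytes through it, authenticated by a ticket from SessionManager.AcquireGenericServiceTicket. A simplified stand-in using requests; the function, cookie handling, and verify=False are illustrative assumptions, not oslo_vmware's handle implementation.

```python
import requests

# Illustrative only: stream image bytes to a vSphere datastore /folder URL.
def upload_vmdk(host, ds_name, dc_path, rel_path, data, cookie, size):
    url = f"https://{host}/folder/{rel_path}"
    resp = requests.put(
        url,
        params={"dcPath": dc_path, "dsName": ds_name},
        data=data,                                   # file-like object or iterator
        headers={"Content-Length": str(size), "Cookie": cookie},
        verify=False,                                # lab setup; not for production
    )
    resp.raise_for_status()
```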
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2236.433687] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f4c063-c9e9-4655-8129-f5574e73bb49 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.441103] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc6ccde-ec46-4318-8c30-fab56ca0e6d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.481099] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54af4bff-a184-420f-8e14-e04acdd7fee5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.489983] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6b281c-f0bc-45f1-acd9-39b1325c80d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.504930] env[61573]: DEBUG nova.compute.provider_tree [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.514036] env[61573]: DEBUG nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2236.530946] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.355s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.531530] env[61573]: ERROR nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.531530] env[61573]: Faults: ['InvalidArgument'] [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Traceback (most recent call last): [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2236.531530] env[61573]: 
ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self.driver.spawn(context, instance, image_meta, [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self._fetch_image_if_missing(context, vi) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] image_cache(vi, tmp_image_ds_loc) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] vm_util.copy_virtual_disk( [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] session._wait_for_task(vmdk_copy_task) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return self.wait_for_task(task_ref) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return evt.wait() [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] result = hub.switch() [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] return self.greenlet.switch() [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] self.f(*self.args, **self.kw) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] raise exceptions.translate_fault(task_info.error) [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Faults: ['InvalidArgument'] [ 2236.531530] env[61573]: ERROR nova.compute.manager [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] [ 2236.532353] env[61573]: DEBUG nova.compute.utils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2236.534130] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Build of instance cef978e5-e61d-4188-a58e-1b5690731c1b was re-scheduled: A specified parameter was not correct: fileType [ 2236.534130] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2236.534515] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2236.534727] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2236.534912] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2236.535099] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2236.813228] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.824882] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Took 0.29 seconds to deallocate network for instance. [ 2236.915619] env[61573]: INFO nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted allocations for instance cef978e5-e61d-4188-a58e-1b5690731c1b [ 2236.955456] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 487.947s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.956929] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 291.978s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.957199] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.957457] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.957660] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.960546] env[61573]: INFO nova.compute.manager [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Terminating instance [ 2236.962359] env[61573]: DEBUG nova.compute.manager [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2236.962576] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2236.963163] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce38f871-06de-407c-842c-275ffb3f2fc8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.973762] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6843149a-b28c-414c-98b0-a94ccdddb700 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.006415] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2237.010099] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cef978e5-e61d-4188-a58e-1b5690731c1b could not be found. [ 2237.010247] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2237.010434] env[61573]: INFO nova.compute.manager [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2237.010690] env[61573]: DEBUG oslo.service.loopingcall [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2237.010954] env[61573]: DEBUG nova.compute.manager [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2237.011061] env[61573]: DEBUG nova.network.neutron [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2237.033472] env[61573]: DEBUG nova.network.neutron [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2237.044367] env[61573]: INFO nova.compute.manager [-] [instance: cef978e5-e61d-4188-a58e-1b5690731c1b] Took 0.03 seconds to deallocate network for instance. [ 2237.066216] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.066509] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.068219] env[61573]: INFO nova.compute.claims [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2237.185815] env[61573]: DEBUG oslo_concurrency.lockutils [None req-876f930e-20ef-4b2c-9b50-f169163fa6d8 tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "cef978e5-e61d-4188-a58e-1b5690731c1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.266383] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e9b9dc-5bcf-471d-b611-144faef83127 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.274530] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc82e17a-de27-4fe3-b831-7fd092c7fd38 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.306335] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527efca7-12ea-4b1a-9d26-e945a3ba1728 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.313991] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1eee18a-831f-4ab3-94fa-93ba4a1dba13 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.327624] env[61573]: DEBUG nova.compute.provider_tree [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2237.337450] env[61573]: DEBUG nova.scheduler.client.report [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2237.351250] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.285s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.351807] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2237.385348] env[61573]: DEBUG nova.compute.utils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2237.387333] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Allocating IP information in the background. 
{{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2237.387609] env[61573]: DEBUG nova.network.neutron [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2237.397040] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2237.465894] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2237.469803] env[61573]: DEBUG nova.policy [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '468062a6200749a886894f41bf8063de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d80b82b526a4ca9b79db7d72896c393', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2237.494471] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2237.494761] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2237.494946] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 
tempest-AttachVolumeNegativeTest-1735044359-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2237.495188] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2237.495387] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2237.495556] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2237.495769] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2237.495932] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2237.496122] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2237.496351] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2237.496562] env[61573]: DEBUG nova.virt.hardware [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2237.497567] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20ad8a0-ab3f-4946-b5df-ac946e6f5c41 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.510429] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2106c609-5b22-4076-a6b3-c55323aa3dec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.890491] env[61573]: DEBUG nova.network.neutron [None 
req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Successfully created port: 42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2238.574537] env[61573]: DEBUG nova.network.neutron [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Successfully updated port: 42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2238.590921] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.590921] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2238.590921] env[61573]: DEBUG nova.network.neutron [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2238.638145] env[61573]: DEBUG nova.network.neutron [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2238.903335] env[61573]: DEBUG nova.network.neutron [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Updating instance_info_cache with network_info: [{"id": "42bf2292-b362-41e4-a771-a2e9d8169e80", "address": "fa:16:3e:d8:37:c1", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42bf2292-b3", "ovs_interfaceid": "42bf2292-b362-41e4-a771-a2e9d8169e80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2238.916051] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Releasing lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2238.916222] env[61573]: DEBUG nova.compute.manager [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Instance network_info: |[{"id": "42bf2292-b362-41e4-a771-a2e9d8169e80", "address": "fa:16:3e:d8:37:c1", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42bf2292-b3", "ovs_interfaceid": "42bf2292-b362-41e4-a771-a2e9d8169e80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2238.916651] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:37:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42bf2292-b362-41e4-a771-a2e9d8169e80', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2238.926864] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Creating folder: Project (9d80b82b526a4ca9b79db7d72896c393). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2238.928851] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc036d6a-f3c1-4941-ad75-7b5fafea15c4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.941347] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Created folder: Project (9d80b82b526a4ca9b79db7d72896c393) in parent group-v942801. [ 2238.941782] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Creating folder: Instances. Parent ref: group-v942919. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2238.942122] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40b8fad8-173b-4a76-9227-370a95adf638 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.951299] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Created folder: Instances in parent group-v942919. [ 2238.951561] env[61573]: DEBUG oslo.service.loopingcall [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2238.951747] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2238.951956] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ca8dd4a-395c-4053-815e-6f0170c40c66 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.974740] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2238.974740] env[61573]: value = "task-4836941" [ 2238.974740] env[61573]: _type = "Task" [ 2238.974740] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.984537] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836941, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.014233] env[61573]: DEBUG nova.compute.manager [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Received event network-vif-plugged-42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2239.014909] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Acquiring lock "220b6e63-be92-4ac1-9561-f2967b361eee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.014909] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Lock "220b6e63-be92-4ac1-9561-f2967b361eee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.014909] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Lock "220b6e63-be92-4ac1-9561-f2967b361eee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.015220] env[61573]: DEBUG nova.compute.manager [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] No waiting events found dispatching network-vif-plugged-42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2239.015659] env[61573]: WARNING nova.compute.manager [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Received unexpected event network-vif-plugged-42bf2292-b362-41e4-a771-a2e9d8169e80 for instance with vm_state building and task_state spawning. 
[ 2239.015960] env[61573]: DEBUG nova.compute.manager [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Received event network-changed-42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2239.016213] env[61573]: DEBUG nova.compute.manager [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Refreshing instance network info cache due to event network-changed-42bf2292-b362-41e4-a771-a2e9d8169e80. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2239.016515] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Acquiring lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.016714] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Acquired lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.016938] env[61573]: DEBUG nova.network.neutron [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Refreshing network info cache for port 42bf2292-b362-41e4-a771-a2e9d8169e80 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2239.489219] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836941, 'name': CreateVM_Task, 'duration_secs': 0.311745} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.489421] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2239.490128] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.490302] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.490635] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2239.490895] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2649cf2-ef17-45a2-8337-d8730371e548 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.495978] env[61573]: DEBUG oslo_vmware.api [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Waiting for the task: (returnval){ [ 2239.495978] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]521be5fa-c9cd-1042-4553-db75ea83c71c" [ 2239.495978] env[61573]: _type = "Task" [ 2239.495978] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.506930] env[61573]: DEBUG oslo_vmware.api [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]521be5fa-c9cd-1042-4553-db75ea83c71c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.542104] env[61573]: DEBUG nova.network.neutron [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Updated VIF entry in instance network info cache for port 42bf2292-b362-41e4-a771-a2e9d8169e80. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2239.542684] env[61573]: DEBUG nova.network.neutron [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Updating instance_info_cache with network_info: [{"id": "42bf2292-b362-41e4-a771-a2e9d8169e80", "address": "fa:16:3e:d8:37:c1", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42bf2292-b3", "ovs_interfaceid": "42bf2292-b362-41e4-a771-a2e9d8169e80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2239.551952] env[61573]: DEBUG oslo_concurrency.lockutils [req-e95bbfbb-963e-4707-bade-6c765edae7eb req-5fd0cc9a-7b9d-4819-911b-ba34a86bcb62 service nova] Releasing lock "refresh_cache-220b6e63-be92-4ac1-9561-f2967b361eee" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2240.011066] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2240.012701] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2240.012972] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2258.405141] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.405141] env[61573]: DEBUG 
nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2260.404259] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.404657] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.404657] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.417248] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.417453] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.417625] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.417807] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2260.418922] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd6d146-d8ff-4bc1-95f2-e32ac34dc68e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.427620] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c36faaf-e9b8-4250-bf0e-cbf17469319d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.443204] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0331de22-13ab-43a7-ac51-9856f069cbe1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.450348] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d9c027-6b70-4c07-b1d0-052d97b81978 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.479197] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180545MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2260.479361] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.479545] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.555042] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555219] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555339] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555460] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555579] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555697] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555815] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.555931] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.556064] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.556184] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2260.556375] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2260.556523] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '85', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_c6cc1ffdc2dc45ad85b0be67c4c8b6c1': '1', 'io_workload': '10', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '3', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2260.676029] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00195407-758d-4b41-b239-a643d9b15e69 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.683799] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d0593e-719e-498e-a431-8c58d42d7879 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.714335] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da5c84c-3221-48e1-a9a4-5c047671a105 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.722225] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47f7b90-c47b-4057-a642-06452b89ec46 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.736668] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2260.744789] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2260.757889] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2260.758087] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.279s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.758779] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.759197] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2261.759197] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2261.779165] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779320] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779445] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779571] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779696] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779818] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.779938] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.780065] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.780183] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.780300] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2261.780416] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2264.404366] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.404800] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.398859] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.403549] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.034204] env[61573]: WARNING oslo_vmware.rw_handles [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.034204] env[61573]: ERROR oslo_vmware.rw_handles [ 2282.034992] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2282.036902] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 
4491bc7f-5014-4631-a7ec-486928ef0cf9] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2282.037194] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Copying Virtual Disk [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/adfc8e60-338a-4ab4-930b-5bd35c724ac3/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2282.037485] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc944385-0dc7-4a68-9271-d35ab047f4e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.045797] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2282.045797] env[61573]: value = "task-4836942" [ 2282.045797] env[61573]: _type = "Task" [ 2282.045797] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.054209] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.556500] env[61573]: DEBUG oslo_vmware.exceptions [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2282.556797] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.557403] env[61573]: ERROR nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.557403] env[61573]: Faults: ['InvalidArgument'] [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Traceback (most recent call last): [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] yield resources [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self.driver.spawn(context, instance, image_meta, [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self._fetch_image_if_missing(context, vi) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] image_cache(vi, tmp_image_ds_loc) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] vm_util.copy_virtual_disk( [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] session._wait_for_task(vmdk_copy_task) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return self.wait_for_task(task_ref) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return evt.wait() [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] result = hub.switch() [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return self.greenlet.switch() [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self.f(*self.args, **self.kw) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] raise exceptions.translate_fault(task_info.error) [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Faults: ['InvalidArgument'] [ 2282.557403] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] [ 2282.558293] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Terminating instance [ 2282.559317] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.559548] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2282.559794] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a2944070-6a60-4941-be29-d974c2ec2a57 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.562170] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2282.562363] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2282.563125] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e64832-9fde-4b41-ba25-00f6b1250308 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.571034] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2282.571299] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-206135c5-bfc8-4a66-822f-f8b9d879c325 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.573787] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2282.573980] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2282.574994] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88b39919-b16b-4917-9988-134fd4b94031 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.580676] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for the task: (returnval){ [ 2282.580676] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5254ba2a-7f3c-690d-d68d-dfe11c36cf52" [ 2282.580676] env[61573]: _type = "Task" [ 2282.580676] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.589035] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5254ba2a-7f3c-690d-d68d-dfe11c36cf52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.700211] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2282.700442] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2282.700624] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleting the datastore file [datastore2] 4491bc7f-5014-4631-a7ec-486928ef0cf9 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2282.700906] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84698efd-92c4-46db-a5b7-79548f164624 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.706969] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for the task: (returnval){ [ 2282.706969] env[61573]: value = "task-4836944" [ 2282.706969] env[61573]: _type = "Task" [ 2282.706969] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.715723] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.090799] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2283.091203] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Creating directory with path [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2283.091301] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3343471-2979-4a8a-afc1-b38b3ce030d6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.102907] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Created directory with path [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2283.103114] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Fetch image to [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2283.103290] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2283.104048] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb066a40-9d99-4de8-abed-42f584e3f4f2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.112640] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60102191-ccfc-4ad2-834b-2727bfb83ebc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.123196] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aaf000-712b-4882-8359-f12f17526ed0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.154448] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e5f46f-1a27-4d2e-aff2-e52496f8657f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.161183] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5af4a6e7-1d31-4a14-ba5e-55e985aae867 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.184168] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2283.221431] env[61573]: DEBUG oslo_vmware.api [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Task: {'id': task-4836944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084685} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.221723] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2283.221910] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2283.222118] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2283.222313] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Took 0.66 seconds to destroy the instance on the hypervisor. 
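The failure sequence recorded above follows oslo.vmware's invoke-then-poll pattern: the driver submits CopyVirtualDisk_Task through the API session, blocks in wait_for_task(), and the InvalidArgument ('fileType') fault raised by the poller is what aborts the spawn and triggers the UnregisterVM / DeleteDatastoreFile_Task cleanup seen in the surrounding entries. A minimal sketch of that pattern follows; it assumes an already-created oslo_vmware.api.VMwareAPISession and omits Nova's vm_util/ds_util helpers, so it illustrates the call pattern rather than the actual Nova code path.

from oslo_vmware import exceptions as vexc

def copy_virtual_disk(session, dc_ref, source_path, dest_path):
    # Submit the vCenter CopyVirtualDisk_Task, mirroring the invocation
    # recorded in the log (parameter names are the vSphere API ones;
    # session is assumed to be an oslo_vmware.api.VMwareAPISession).
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=source_path,
                              sourceDatacenter=dc_ref,
                              destName=dest_path,
                              destDatacenter=dc_ref)
    try:
        # wait_for_task() polls the task object; a task-level error such as
        # "A specified parameter was not correct: fileType" surfaces here
        # as a VimFaultException (Faults: ['InvalidArgument']).
        session.wait_for_task(task)
    except vexc.VimFaultException:
        # In the log this propagates through _cache_sparse_image /
        # _fetch_image_if_missing and leads to instance cleanup.
        raise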
[ 2283.224549] env[61573]: DEBUG nova.compute.claims [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2283.224730] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.224954] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.243526] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2283.306269] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2283.306463] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2283.433764] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235fe05b-f192-4dd0-9c2e-48259e59797f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.441920] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2778ecac-466b-4e48-99e0-795ad83222f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.473507] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894059d4-823a-4de1-982f-9d3d1470b617 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.482705] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fc0f34-4092-45e1-8dca-de410f31b60f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.497090] env[61573]: DEBUG nova.compute.provider_tree [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2283.509100] env[61573]: DEBUG nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2283.531193] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.531766] env[61573]: ERROR nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.531766] env[61573]: Faults: ['InvalidArgument'] [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Traceback (most recent call last): [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2283.531766] env[61573]: 
ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self.driver.spawn(context, instance, image_meta, [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self._fetch_image_if_missing(context, vi) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] image_cache(vi, tmp_image_ds_loc) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] vm_util.copy_virtual_disk( [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] session._wait_for_task(vmdk_copy_task) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return self.wait_for_task(task_ref) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return evt.wait() [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] result = hub.switch() [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] return self.greenlet.switch() [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] self.f(*self.args, **self.kw) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] raise exceptions.translate_fault(task_info.error) [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Faults: ['InvalidArgument'] [ 2283.531766] env[61573]: ERROR nova.compute.manager [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] [ 2283.532536] env[61573]: DEBUG nova.compute.utils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2283.534091] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Build of instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 was re-scheduled: A specified parameter was not correct: fileType [ 2283.534091] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2283.534478] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2283.534652] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2283.534844] env[61573]: DEBUG nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2283.535052] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2283.937430] env[61573]: DEBUG nova.network.neutron [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.951265] env[61573]: INFO nova.compute.manager [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Took 0.42 seconds to deallocate network for instance. [ 2284.070944] env[61573]: INFO nova.scheduler.client.report [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Deleted allocations for instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 [ 2284.094340] env[61573]: DEBUG oslo_concurrency.lockutils [None req-673666ad-9fe1-4454-b957-51d17c564acb tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 535.046s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.094693] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 339.205s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.094854] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Acquiring lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.095118] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.095306] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.097426] env[61573]: INFO nova.compute.manager [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Terminating instance [ 2284.099690] env[61573]: DEBUG nova.compute.manager [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2284.099947] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2284.100565] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63b8d569-ab27-4664-a697-b56f289d0d5a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.111534] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58f0a27-3746-46f5-bac9-9cfe2f640846 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.143876] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4491bc7f-5014-4631-a7ec-486928ef0cf9 could not be found. [ 2284.144187] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2284.144399] env[61573]: INFO nova.compute.manager [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2284.144654] env[61573]: DEBUG oslo.service.loopingcall [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2284.144910] env[61573]: DEBUG nova.compute.manager [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2284.145065] env[61573]: DEBUG nova.network.neutron [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.171567] env[61573]: DEBUG nova.network.neutron [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.180332] env[61573]: INFO nova.compute.manager [-] [instance: 4491bc7f-5014-4631-a7ec-486928ef0cf9] Took 0.04 seconds to deallocate network for instance. [ 2284.276326] env[61573]: DEBUG oslo_concurrency.lockutils [None req-4d405a1b-10a4-4b67-be1d-88bd2588201f tempest-MultipleCreateTestJSON-1899361361 tempest-MultipleCreateTestJSON-1899361361-project-member] Lock "4491bc7f-5014-4631-a7ec-486928ef0cf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.404615] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.404895] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 2310.414769] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 2317.405555] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.405859] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 2320.414112] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.414401] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.414556] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2321.404668] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.404870] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2321.405066] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2321.424843] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425213] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425213] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425285] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425455] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425609] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425742] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.425881] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.426014] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2321.426112] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2321.426654] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.438736] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.439034] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.439168] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2321.439364] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2321.440465] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8290a128-a6cb-412f-8474-e10101f39fec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.449521] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947775fe-a662-47f3-8712-390c52f37bf5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.466029] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa5bb40-fb5f-4ad7-9aa5-43be47a20f32 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.473518] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83ea7e6-d70a-4759-8d62-54663dfd8f7e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.504853] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node 
resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180518MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2321.504853] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.505116] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.656327] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.656501] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2dc713f6-b67b-4360-a751-29b7218e130a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.656632] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.656754] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.656874] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.656988] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.657120] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.657236] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.657349] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2321.657582] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2321.657742] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '86', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '6', 'num_os_type_None': '9', 'num_proj_ad53ba52560d475e9c8a48903da448ec': '1', 'io_workload': '9', 'num_proj_0341bbb698194bf6a4cbca166a5dfffe': '1', 'num_proj_d8caf6003840413c8eff7d84d9b185cb': '1', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '3', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2321.674452] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2321.688854] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2321.689054] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2321.700173] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2321.718070] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2321.834519] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c34b22f-2cb7-416c-9eef-927d2d24510d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.842836] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957d2c2f-733d-41a5-aa5a-50dc43f5e296 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.872473] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915c961c-cb27-468c-b9d6-c00a1e464405 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.880473] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2069384-7b33-443c-b9cc-5f885598f4fe {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.893931] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2321.905965] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2321.921214] env[61573]: DEBUG 
nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2321.921412] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.416s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.898995] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.403697] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.399967] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.403588] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.403900] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.400582] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.600065] env[61573]: WARNING oslo_vmware.rw_handles [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2331.600065] env[61573]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2331.600065] env[61573]: ERROR oslo_vmware.rw_handles [ 2331.600688] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2331.602402] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2331.602648] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Copying Virtual Disk [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/3b6c6fbd-fb2a-431d-a297-d9bf552e5808/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2331.602940] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9d0bb2a-2e02-4f1b-9a0f-15b42354cc4b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.610746] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for the task: (returnval){ [ 2331.610746] env[61573]: value = "task-4836945" [ 2331.610746] env[61573]: _type = "Task" [ 2331.610746] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.619178] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Task: {'id': task-4836945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.121411] env[61573]: DEBUG oslo_vmware.exceptions [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2332.121694] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.122288] env[61573]: ERROR nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.122288] env[61573]: Faults: ['InvalidArgument'] [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Traceback (most recent call last): [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] yield resources [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self.driver.spawn(context, instance, image_meta, [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self._fetch_image_if_missing(context, vi) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] image_cache(vi, tmp_image_ds_loc) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] vm_util.copy_virtual_disk( [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] session._wait_for_task(vmdk_copy_task) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return self.wait_for_task(task_ref) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return evt.wait() [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] result = hub.switch() [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return self.greenlet.switch() [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self.f(*self.args, **self.kw) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] raise exceptions.translate_fault(task_info.error) [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Faults: ['InvalidArgument'] [ 2332.122288] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] [ 2332.123341] env[61573]: INFO nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Terminating instance [ 2332.124219] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.124422] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.124662] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-565ea1fe-6ea3-4611-882e-fc9f2d0282a7 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.128289] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2332.128447] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2332.129222] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecaef71-ddbd-4216-86ea-e4494d319397 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.136195] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2332.136448] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56062174-f10d-45a7-8dfd-3a7f3e6be8e1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.138942] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.139163] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2332.139845] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65afb93-8781-4e9a-a2d4-1b87fc8b0a8a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.145356] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 2332.145356] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52571ad5-70a1-8937-b417-c8f796e42c26" [ 2332.145356] env[61573]: _type = "Task" [ 2332.145356] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.153124] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52571ad5-70a1-8937-b417-c8f796e42c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.211092] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2332.211380] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2332.211534] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Deleting the datastore file [datastore2] 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2332.211789] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9776fb4d-22b5-4201-834e-60b52b97ed5d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.218147] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for the task: (returnval){ [ 2332.218147] env[61573]: value = "task-4836947" [ 2332.218147] env[61573]: _type = "Task" [ 2332.218147] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.226508] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Task: {'id': task-4836947, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.655798] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2332.656126] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Creating directory with path [datastore2] vmware_temp/5bb73574-21b2-427c-a0fb-9842cd1405a1/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.656332] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a1fa3c8-0c7f-4a1c-9570-045d84f45394 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.667897] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Created directory with path [datastore2] vmware_temp/5bb73574-21b2-427c-a0fb-9842cd1405a1/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.668134] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Fetch image to [datastore2] vmware_temp/5bb73574-21b2-427c-a0fb-9842cd1405a1/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2332.668284] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/5bb73574-21b2-427c-a0fb-9842cd1405a1/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2332.669053] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2805edd-b9f8-4f9c-b1cf-8b9d758530e4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.676053] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a41fcc-9335-46fc-b974-3ced9a8e4017 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.685481] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2390788-9080-4db7-aeef-f6ff5c961730 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.717060] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80d9c14-2a96-4e4a-8f53-495ff2446ae6 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.728867] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-74420e01-3e57-4109-b6a9-82acb09f908e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.730770] env[61573]: DEBUG oslo_vmware.api [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Task: {'id': task-4836947, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077322} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.731041] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2332.731227] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2332.731400] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2332.731574] env[61573]: INFO nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2332.734126] env[61573]: DEBUG nova.compute.claims [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2332.734311] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.734529] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.752427] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2332.912426] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a950c4ae-9f20-4cc7-b21c-f6c2893fe690 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.920893] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc49fd6-6516-4efe-a101-7bb02a46fc12 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.926704] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.927569] env[61573]: ERROR nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] yield resources [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.driver.spawn(context, instance, image_meta, [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._fetch_image_if_missing(context, vi) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image_fetch(context, vi, tmp_image_ds_loc) [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] images.fetch_image( [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2332.927569] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] metadata = IMAGE_API.get(context, image_ref) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return session.show(context, image_id, [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] _reraise_translated_image_exception(image_id) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise new_exc.with_traceback(exc_trace) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 2332.928657] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2332.928657] env[61573]: INFO nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Terminating instance [ 2332.929474] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.929684] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.954260] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2332.954431] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2332.954729] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd0dc531-00f0-418c-827a-e917f6d3ed7f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.957477] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddc3c90-5744-451f-840e-96e51a28f786 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.960495] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e831b67a-695e-4a21-86dd-812356b66908 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.968736] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2332.970781] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9eb81b14-ac20-4370-92bc-2111bba7acd5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.974192] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.974421] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2332.977595] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c19755ab-0d37-49f5-b833-464c0f402805 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.981214] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e505c3b8-3603-459a-885b-e13853b87a30 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.991427] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 2332.991427] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]528bafa3-5074-0c26-0f53-8351621615e3" [ 2332.991427] env[61573]: _type = "Task" [ 2332.991427] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.000641] env[61573]: DEBUG nova.compute.provider_tree [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2333.012324] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2333.012625] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating directory with path [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2333.013371] env[61573]: DEBUG nova.scheduler.client.report [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2333.016372] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-d182f401-afe6-4671-8290-8e7db329fde9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.030935] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.031554] env[61573]: ERROR nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2333.031554] env[61573]: Faults: ['InvalidArgument'] [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Traceback (most recent call last): [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self.driver.spawn(context, instance, image_meta, [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self._fetch_image_if_missing(context, vi) [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] image_cache(vi, tmp_image_ds_loc) [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] vm_util.copy_virtual_disk( [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] session._wait_for_task(vmdk_copy_task) [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return self.wait_for_task(task_ref) [ 2333.031554] 
env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return evt.wait() [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] result = hub.switch() [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] return self.greenlet.switch() [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] self.f(*self.args, **self.kw) [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] raise exceptions.translate_fault(task_info.error) [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Faults: ['InvalidArgument'] [ 2333.031554] env[61573]: ERROR nova.compute.manager [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] [ 2333.032492] env[61573]: DEBUG nova.compute.utils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2333.033959] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Build of instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d was re-scheduled: A specified parameter was not correct: fileType [ 2333.033959] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2333.034350] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2333.034560] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 
tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2333.034702] env[61573]: DEBUG nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2333.034866] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2333.038172] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Created directory with path [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2333.038368] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Fetch image to [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2333.038516] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2333.039310] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d492550-66d8-4a68-88af-2b81d418e6b1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.047439] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f40c27c-895f-488a-b2d0-c5760ae1c596 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.058937] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3432734-0f68-4a61-926c-e9192998d1c3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.095243] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61f65d8-d079-4d4a-91f7-25d2eca865c4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.097964] 
env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2333.098184] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2333.098394] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleting the datastore file [datastore2] 2dc713f6-b67b-4360-a751-29b7218e130a {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2333.098704] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4455299-4092-4b1f-9596-00f0de8b0255 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.105397] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2867ec73-79e0-4b4e-9614-7b3582b14093 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.107443] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for the task: (returnval){ [ 2333.107443] env[61573]: value = "task-4836949" [ 2333.107443] env[61573]: _type = "Task" [ 2333.107443] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.134115] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2333.190650] env[61573]: DEBUG oslo_vmware.rw_handles [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2333.250649] env[61573]: DEBUG oslo_vmware.rw_handles [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Completed reading data from the image iterator. 
{{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2333.250890] env[61573]: DEBUG oslo_vmware.rw_handles [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2333.417486] env[61573]: DEBUG nova.network.neutron [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2333.429168] env[61573]: INFO nova.compute.manager [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Took 0.39 seconds to deallocate network for instance. [ 2333.539518] env[61573]: INFO nova.scheduler.client.report [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Deleted allocations for instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d [ 2333.563129] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d527a67c-81d2-48be-bebc-66325edef781 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 566.494s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.563313] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 369.883s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.563554] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Acquiring lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.563768] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.563939] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.566574] env[61573]: INFO nova.compute.manager [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Terminating instance [ 2333.568904] env[61573]: DEBUG nova.compute.manager [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2333.569115] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2333.569383] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7441806-5f40-4d61-94b1-b507d646fe32 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.579293] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64510c85-2186-4121-b28e-359e3623075d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.610800] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d could not be found. [ 2333.611404] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2333.611404] env[61573]: INFO nova.compute.manager [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Took 0.04 seconds to destroy the instance on the hypervisor. 
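[editor's note] The wait_for_task / _poll_task frames in the traceback above, together with the "Waiting for the task ... to complete" records, reflect the usual vSphere task-polling pattern: submit an asynchronous operation, poll its TaskInfo until it reaches "success" or "error", and translate a server-side fault such as InvalidArgument ("A specified parameter was not correct: fileType") into a Python exception for the caller. Below is a minimal, self-contained sketch of that pattern; the names poll_task, TaskInfo and VimFault are hypothetical stand-ins for illustration and this is not the oslo_vmware implementation.

```python
import time
from dataclasses import dataclass

class VimFault(Exception):
    """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""

@dataclass
class TaskInfo:                 # hypothetical snapshot of a vSphere TaskInfo object
    state: str                  # 'queued' | 'running' | 'success' | 'error'
    error: str | None = None

def poll_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task until it finishes, mirroring the wait-for-task pattern in the log.

    get_task_info is any callable returning the current TaskInfo; a real driver
    would read it through the vSphere PropertyCollector instead.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # Surface the server-side fault to the caller, e.g.
            # "A specified parameter was not correct: fileType".
            raise VimFault(info.error)
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```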
[ 2333.611625] env[61573]: DEBUG oslo.service.loopingcall [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2333.614744] env[61573]: DEBUG nova.compute.manager [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2333.614852] env[61573]: DEBUG nova.network.neutron [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2333.621667] env[61573]: DEBUG oslo_vmware.api [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Task: {'id': task-4836949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09379} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.621899] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2333.622089] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2333.622268] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2333.622434] env[61573]: INFO nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Took 0.67 seconds to destroy the instance on the hypervisor. 
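[editor's note] The oslo_vmware.rw_handles records above (roughly 2333.190 to 2333.250) show the image data being streamed over HTTPS into a vmware_temp directory on datastore2 through an ESX host "folder" URL. The sketch below illustrates that kind of transfer using requests as a stand-in for oslo_vmware's read/write handles; the helper name, ticket handling and error handling are simplified assumptions, not the code Nova actually runs.

```python
import requests

def upload_to_datastore(image_stream, host, ds_path, datacenter, datastore,
                        cookie, verify=True):
    """Stream image bytes into a datastore file via the ESX/vCenter 'folder' endpoint.

    image_stream: a file-like object or iterator of byte chunks (for example a
    Glance download iterator). cookie: an authenticated vSphere session cookie or
    generic service ticket; obtaining it is out of scope for this sketch.
    """
    url = f"https://{host}:443/folder/{ds_path}"
    params = {"dcPath": datacenter, "dsName": datastore}
    headers = {"Content-Type": "application/octet-stream", "Cookie": cookie}
    # requests consumes the stream lazily, so the whole image never sits in memory.
    resp = requests.put(url, params=params, data=image_stream,
                        headers=headers, verify=verify)
    resp.raise_for_status()
    return resp.status_code
```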
[ 2333.624595] env[61573]: DEBUG nova.compute.claims [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2333.624771] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.624986] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.646052] env[61573]: DEBUG nova.network.neutron [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2333.668131] env[61573]: INFO nova.compute.manager [-] [instance: 3d7b1e8d-31ea-44a7-a0a5-b5f552db758d] Took 0.05 seconds to deallocate network for instance. [ 2333.697937] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.701869] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 2333.701869] env[61573]: value = "domain-c8" [ 2333.701869] env[61573]: _type = "ClusterComputeResource" [ 2333.701869] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2333.701869] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf9f5ee-a5cc-434f-bdd7-94ae9d0a4e7d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.718477] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 7 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2333.798980] env[61573]: DEBUG oslo_concurrency.lockutils [None req-14dc7e08-1b6c-43e0-820c-6c847a266b01 tempest-AttachInterfacesUnderV243Test-1333420750 tempest-AttachInterfacesUnderV243Test-1333420750-project-member] Lock "3d7b1e8d-31ea-44a7-a0a5-b5f552db758d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.236s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.806680] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a519f903-9de9-4b50-b276-1ba4006456d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.815128] env[61573]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8497448c-d1e7-4443-8d63-f80168280ea1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.847276] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20be43d9-c276-4dbf-995c-3bb484a81c4f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.855309] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f48495-5d8f-4e01-a340-c2afe7f91a7a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.869034] env[61573]: DEBUG nova.compute.provider_tree [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2333.878279] env[61573]: DEBUG nova.scheduler.client.report [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2333.893267] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.268s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.893974] env[61573]: ERROR nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.driver.spawn(context, instance, image_meta, [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._fetch_image_if_missing(context, vi) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image_fetch(context, vi, tmp_image_ds_loc) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] images.fetch_image( [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] metadata = IMAGE_API.get(context, image_ref) [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2333.893974] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return session.show(context, image_id, [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] _reraise_translated_image_exception(image_id) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise new_exc.with_traceback(exc_trace) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 
2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 2333.894972] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2333.894972] env[61573]: DEBUG nova.compute.utils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
{{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2333.896589] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Build of instance 2dc713f6-b67b-4360-a751-29b7218e130a was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2333.897040] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2333.897215] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2333.897381] env[61573]: DEBUG nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2333.897599] env[61573]: DEBUG nova.network.neutron [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2334.005370] env[61573]: DEBUG neutronclient.v2_0.client [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2334.006825] env[61573]: ERROR nova.compute.manager [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.driver.spawn(context, instance, image_meta, [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._fetch_image_if_missing(context, vi) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image_fetch(context, vi, tmp_image_ds_loc) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] images.fetch_image( [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] metadata = IMAGE_API.get(context, image_ref) [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2334.006825] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return session.show(context, image_id, [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] _reraise_translated_image_exception(image_id) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise new_exc.with_traceback(exc_trace) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 
2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = getattr(controller, method)(*args, **kwargs) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._get(image_id) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] resp, body = self.http_client.get(url, headers=header) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.request(url, 'GET', **kwargs) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self._handle_response(resp) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exc.from_response(resp, resp.content) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.ImageNotAuthorized: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. 
[ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._build_and_run_instance(context, instance, image, [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exception.RescheduledException( [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.RescheduledException: Build of instance 2dc713f6-b67b-4360-a751-29b7218e130a was re-scheduled: Not authorized for image 896f953b-9c5a-4a3b-82f8-e48debb67b3a. [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] exception_handler_v20(status_code, error_body) [ 2334.008231] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise client_exc(message=error_message, [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Neutron server returns request_ids: ['req-353cfae9-29e6-4117-ad33-f7fba968f69c'] [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 
2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._deallocate_network(context, instance, requested_networks) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.network_api.deallocate_for_instance( [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] data = neutron.list_ports(**search_opts) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.list('ports', self.ports_path, retrieve_all, [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] for r in self._pagination(collection, path, **params): [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] res = self.get(path, params=params) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 
2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.retry_request("GET", action, body=body, [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.do_request(method, action, body=body, [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._handle_fault_response(status_code, replybody, resp) [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exception.Unauthorized() [ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.Unauthorized: Not authorized. 
[ 2334.009400] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.085150] env[61573]: INFO nova.scheduler.client.report [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Deleted allocations for instance 2dc713f6-b67b-4360-a751-29b7218e130a [ 2334.105240] env[61573]: DEBUG oslo_concurrency.lockutils [None req-42c9e44a-f63d-46a2-a59d-824f0cce3a7e tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 560.482s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.105559] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 364.930s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.105790] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Acquiring lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.106012] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.106190] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.108145] env[61573]: INFO nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Terminating instance [ 2334.109841] env[61573]: DEBUG nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2334.110068] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2334.110537] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02d874ec-0485-4441-98bb-de330527a5bf {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.119200] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d521ee4-ca8d-4671-b45a-39a8c19416a3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.151719] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2dc713f6-b67b-4360-a751-29b7218e130a could not be found. [ 2334.151998] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2334.152216] env[61573]: INFO nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2334.152473] env[61573]: DEBUG oslo.service.loopingcall [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2334.152704] env[61573]: DEBUG nova.compute.manager [-] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2334.152795] env[61573]: DEBUG nova.network.neutron [-] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2334.244981] env[61573]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61573) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2334.245318] env[61573]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-1ce0f7ba-0c1a-4b57-b050-6f547be25f4c'] [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2334.245883] env[61573]: ERROR oslo.service.loopingcall [ 2334.247291] env[61573]: ERROR nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2334.286648] env[61573]: ERROR nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] exception_handler_v20(status_code, error_body) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise client_exc(message=error_message, [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Neutron server returns request_ids: ['req-1ce0f7ba-0c1a-4b57-b050-6f547be25f4c'] [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] During handling of the above exception, another exception occurred: [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Traceback (most recent call last): [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File 
"/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._delete_instance(context, instance, bdms) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._shutdown_instance(context, instance, bdms) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._try_deallocate_network(context, instance, requested_networks) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] with excutils.save_and_reraise_exception(): [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.force_reraise() [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise self.value [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] _deallocate_network_with_retries() [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return evt.wait() [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = hub.switch() [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.greenlet.switch() [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = 
func(*self.args, **self.kw) [ 2334.286648] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] result = f(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._deallocate_network( [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self.network_api.deallocate_for_instance( [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] data = neutron.list_ports(**search_opts) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.list('ports', self.ports_path, retrieve_all, [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] for r in self._pagination(collection, path, **params): [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] res = self.get(path, params=params) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.retry_request("GET", action, body=body, [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] return self.do_request(method, action, body=body, [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] ret = obj(*args, **kwargs) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] self._handle_fault_response(status_code, replybody, resp) [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2334.287646] env[61573]: ERROR nova.compute.manager [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] [ 2334.315788] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Lock "2dc713f6-b67b-4360-a751-29b7218e130a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.369108] env[61573]: INFO nova.compute.manager [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] [instance: 2dc713f6-b67b-4360-a751-29b7218e130a] Successfully reverted task state from None on failure for instance. [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server [None req-6b3bac13-58df-4579-b5d8-d62c2552edfd tempest-ServersTestMultiNic-507798381 tempest-ServersTestMultiNic-507798381-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-1ce0f7ba-0c1a-4b57-b050-6f547be25f4c'] [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2334.372832] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server raise self.value [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.374376] env[61573]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2334.374376] env[61573]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2334.376135] env[61573]: ERROR oslo_messaging.rpc.server [ 2343.404028] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.929934] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.675685] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2366.697603] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Getting list of instances from cluster (obj){ [ 2366.697603] env[61573]: value = "domain-c8" [ 2366.697603] env[61573]: _type = "ClusterComputeResource" [ 2366.697603] env[61573]: } {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2366.698951] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1432fd15-0426-4ffc-9bd8-c5d4f5fae0c6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.715052] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Got total of 7 instances {{(pid=61573) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2366.715052] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid d80e3b10-95a8-45b8-84d2-6221ba33f2d7 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715052] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid c3053874-e935-40c5-ac81-268e759611f1 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715052] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 2aa8f536-49ba-43f8-8f36-5741e300652a {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715259] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid d892e1ae-e434-40b2-b86a-99c55d473363 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715339] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715489] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 07210470-d769-43e0-8d38-b076c374d203 {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715638] env[61573]: DEBUG 
nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Triggering sync for uuid 220b6e63-be92-4ac1-9561-f2967b361eee {{(pid=61573) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10405}} [ 2366.715960] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.716203] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "c3053874-e935-40c5-ac81-268e759611f1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.716406] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "2aa8f536-49ba-43f8-8f36-5741e300652a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.716603] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "d892e1ae-e434-40b2-b86a-99c55d473363" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.716793] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.716983] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "07210470-d769-43e0-8d38-b076c374d203" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.717192] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "220b6e63-be92-4ac1-9561-f2967b361eee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.541032] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "07210470-d769-43e0-8d38-b076c374d203" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2378.727936] env[61573]: WARNING oslo_vmware.rw_handles [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Error occurred 
while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2378.727936] env[61573]: ERROR oslo_vmware.rw_handles [ 2378.728652] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2378.730470] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2378.730732] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Copying Virtual Disk [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/bb64034d-efce-4211-af16-181448bfcfcb/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2378.731055] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24511bf1-0f59-4c56-b8cc-22573748441f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.741135] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 2378.741135] env[61573]: value = "task-4836950" [ 2378.741135] env[61573]: _type = "Task" [ 2378.741135] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.750567] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836950, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.250997] env[61573]: DEBUG oslo_vmware.exceptions [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2379.251404] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2379.251864] env[61573]: ERROR nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.251864] env[61573]: Faults: ['InvalidArgument'] [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Traceback (most recent call last): [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] yield resources [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self.driver.spawn(context, instance, image_meta, [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self._fetch_image_if_missing(context, vi) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] image_cache(vi, tmp_image_ds_loc) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: 
d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] vm_util.copy_virtual_disk( [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] session._wait_for_task(vmdk_copy_task) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return self.wait_for_task(task_ref) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return evt.wait() [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] result = hub.switch() [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return self.greenlet.switch() [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self.f(*self.args, **self.kw) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] raise exceptions.translate_fault(task_info.error) [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Faults: ['InvalidArgument'] [ 2379.251864] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] [ 2379.253047] env[61573]: INFO nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Terminating instance [ 2379.253758] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.253971] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.254241] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b4a1779-9cfe-46a1-beb2-c3531d748ad4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.256532] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2379.256726] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2379.257446] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07db8154-ccdb-4de8-8122-083cd2de3683 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.264681] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2379.264908] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12b864c1-d9fa-4372-a7c9-36f137523da0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.267252] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.267459] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2379.268438] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36131a3c-a72c-4310-b88d-92523694d14d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.273546] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for the task: (returnval){ [ 2379.273546] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]529ebb0d-c8ee-5db0-66d9-9d2ac59a5bc2" [ 2379.273546] env[61573]: _type = "Task" [ 2379.273546] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.281507] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]529ebb0d-c8ee-5db0-66d9-9d2ac59a5bc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.334887] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2379.335229] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2379.335346] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleting the datastore file [datastore2] d80e3b10-95a8-45b8-84d2-6221ba33f2d7 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2379.335539] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21f9b7ea-1283-43db-b0eb-1a1fa56bb7ca {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.342445] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for the task: (returnval){ [ 2379.342445] env[61573]: value = "task-4836952" [ 2379.342445] env[61573]: _type = "Task" [ 2379.342445] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.350702] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.784013] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2379.784507] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Creating directory with path [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.784867] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a39460ee-44bb-4f9f-887f-17f7813e61d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.797690] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Created directory with path [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.797919] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Fetch image to [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2379.798070] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2379.798888] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0b1995-39bc-43dc-bec5-dcc410079c0e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.806216] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44baf05a-bdbd-43e3-b66f-79ce49331bee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.816068] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7986cf-5d3f-4a21-a6df-5b4ee13f0242 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.858313] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd7b98-6119-48db-ac03-95a1231e1e79 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.867307] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b59439a3-acfd-45a1-b723-2034c3e287d7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.868992] env[61573]: DEBUG oslo_vmware.api [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Task: {'id': task-4836952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079311} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2379.869268] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2379.869446] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2379.869618] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2379.869789] env[61573]: INFO nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2379.871976] env[61573]: DEBUG nova.compute.claims [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2379.872124] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.872344] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.891116] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2379.955950] env[61573]: DEBUG oslo_vmware.rw_handles [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2380.023475] env[61573]: DEBUG oslo_vmware.rw_handles [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2380.023672] env[61573]: DEBUG oslo_vmware.rw_handles [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2380.097989] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b0fcbb-ae6c-4221-b5ad-8469a37ee6ae {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.107233] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f79c131-b1bf-4820-b9d7-68c5a96db11c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.137250] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f444c0b7-f50e-459e-871b-3dea61ebc86f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.145294] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2febf35c-d4ab-40c8-88b3-236d0bcd6898 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.161681] env[61573]: DEBUG nova.compute.provider_tree [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2380.171857] env[61573]: DEBUG nova.scheduler.client.report [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2380.192887] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.193466] env[61573]: ERROR nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.193466] env[61573]: Faults: ['InvalidArgument'] [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Traceback (most recent call last): [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2380.193466] env[61573]: ERROR 
nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self.driver.spawn(context, instance, image_meta, [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self._fetch_image_if_missing(context, vi) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] image_cache(vi, tmp_image_ds_loc) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] vm_util.copy_virtual_disk( [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] session._wait_for_task(vmdk_copy_task) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return self.wait_for_task(task_ref) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return evt.wait() [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] result = hub.switch() [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] return self.greenlet.switch() [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] self.f(*self.args, **self.kw) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] raise exceptions.translate_fault(task_info.error) [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Faults: ['InvalidArgument'] [ 2380.193466] env[61573]: ERROR nova.compute.manager [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] [ 2380.194626] env[61573]: DEBUG nova.compute.utils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2380.195701] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Build of instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 was re-scheduled: A specified parameter was not correct: fileType [ 2380.195701] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2380.196092] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2380.196273] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2380.196451] env[61573]: DEBUG nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2380.196610] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.546477] env[61573]: DEBUG nova.network.neutron [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.566549] env[61573]: INFO nova.compute.manager [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Took 0.37 seconds to deallocate network for instance. [ 2380.703313] env[61573]: INFO nova.scheduler.client.report [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Deleted allocations for instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 [ 2380.725865] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bf5c8336-ef5e-43d5-a6e5-ecc5a291a7ec tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 601.768s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.727074] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 405.719s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.727337] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Acquiring lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.727545] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.727712] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.730192] env[61573]: INFO nova.compute.manager [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Terminating instance [ 2380.732481] env[61573]: DEBUG nova.compute.manager [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2380.733279] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2380.733604] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee027b76-6dc0-4a21-a0a5-ae2e21d6b276 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.744351] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d07047-c622-4758-902e-1b4992614313 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.775831] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d80e3b10-95a8-45b8-84d2-6221ba33f2d7 could not be found. [ 2380.776205] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2380.776470] env[61573]: INFO nova.compute.manager [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2380.776760] env[61573]: DEBUG oslo.service.loopingcall [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2380.777055] env[61573]: DEBUG nova.compute.manager [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2380.777192] env[61573]: DEBUG nova.network.neutron [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.806177] env[61573]: DEBUG nova.network.neutron [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.816396] env[61573]: INFO nova.compute.manager [-] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] Took 0.04 seconds to deallocate network for instance. [ 2380.942781] env[61573]: DEBUG oslo_concurrency.lockutils [None req-3e1abc3a-bdbd-41c6-a15c-c494ce6d351d tempest-DeleteServersTestJSON-1067357275 tempest-DeleteServersTestJSON-1067357275-project-member] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.943652] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.228s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.943835] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d80e3b10-95a8-45b8-84d2-6221ba33f2d7] During sync_power_state the instance has a pending task (deleting). Skip. [ 2380.944015] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d80e3b10-95a8-45b8-84d2-6221ba33f2d7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.445997] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.446335] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2381.446511] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2381.463626] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.463797] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.463914] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.464082] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.464238] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.464386] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2381.464519] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2381.465100] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.403977] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.404340] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2382.404599] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.419481] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.419974] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2382.419974] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.420070] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2382.421305] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10eea1f0-7023-4df0-a76e-5b43d37b5015 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.430621] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b80914-3785-4fc6-af62-99890e171f35 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.446107] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8170d7a3-08dd-432e-8573-410718924e94 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.453063] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8625dac1-a5a2-45f6-9f3a-5f446609ccb6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.487802] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180508MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2382.487802] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2382.487802] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2382.747081] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance c3053874-e935-40c5-ac81-268e759611f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.747292] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.747441] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.747577] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.748212] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.748212] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2382.748212] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2382.748212] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '89', 'num_instances': '6', 'num_vm_building': '6', 'num_task_deleting': '5', 'num_os_type_None': '6', 'num_proj_7b0a7b0d400a42c9b32dc3f491d17d74': '1', 'io_workload': '6', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_task_spawning': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2382.839108] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b3a038-0618-457c-9100-b1cc9b5c5d6f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.847541] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16bca8f-7afb-4d08-947f-52ce5646caf8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.880573] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2246d5eb-b68c-4d7b-b8e9-7b9bde1d3de0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.889185] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48e4519-52bc-46d1-ab4d-2256a4fdff1d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.902908] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2382.912598] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2382.932553] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2382.932912] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.447s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2384.934774] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.404322] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.399489] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.404629] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.405060] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.833700] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.834058] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.845435] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2389.897964] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.898243] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.900108] env[61573]: INFO nova.compute.claims [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2390.050888] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87ddea7-b7ba-45fb-9fab-2a856b72df86 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.059399] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f2930f-538a-40e7-bcc2-e5deee0a92d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.091562] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3024d6d9-cbbe-44bb-bdb1-0fd6d6830d3b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.100263] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e0dc65-63bb-48a0-bc5f-8d6ac6dd664e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.114144] env[61573]: DEBUG nova.compute.provider_tree [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2390.125133] env[61573]: DEBUG nova.scheduler.client.report [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2390.140412] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2390.140862] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2390.184661] env[61573]: DEBUG nova.compute.utils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2390.186181] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2390.186308] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2390.196521] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2390.278821] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2390.293454] env[61573]: DEBUG nova.policy [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e209cfe158004e46a9693c62a5c2e3f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90325af48fc44451a6c15e089107271a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2390.305934] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2390.306305] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2390.306457] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2390.306649] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2390.306806] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2390.306967] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2390.307384] 
env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2390.307583] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2390.307777] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2390.307951] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2390.308351] env[61573]: DEBUG nova.virt.hardware [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2390.309214] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64eee0c-c7d0-43a7-971b-3843e0b28e4d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.318674] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e553222-735b-4917-bbeb-3a73db034132 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.666306] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Successfully created port: 69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2391.440422] env[61573]: DEBUG nova.compute.manager [req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Received event network-vif-plugged-69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2391.440422] env[61573]: DEBUG oslo_concurrency.lockutils [req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] Acquiring lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2391.444915] env[61573]: DEBUG oslo_concurrency.lockutils 
[req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] Lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2391.444915] env[61573]: DEBUG oslo_concurrency.lockutils [req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] Lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.444915] env[61573]: DEBUG nova.compute.manager [req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] No waiting events found dispatching network-vif-plugged-69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2391.444915] env[61573]: WARNING nova.compute.manager [req-c0ccf2bf-be78-4037-b3b4-f52319827fa7 req-00c2e015-ef2e-489d-bc40-2d1559300120 service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Received unexpected event network-vif-plugged-69214c85-6efb-469a-8f5f-fc43abc5aec1 for instance with vm_state building and task_state spawning. [ 2391.531249] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Successfully updated port: 69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2391.544376] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2391.544834] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2391.544834] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2391.589723] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2391.773975] env[61573]: DEBUG nova.network.neutron [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Updating instance_info_cache with network_info: [{"id": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "address": "fa:16:3e:48:19:75", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69214c85-6e", "ovs_interfaceid": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2391.789683] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2391.789986] env[61573]: DEBUG nova.compute.manager [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Instance network_info: |[{"id": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "address": "fa:16:3e:48:19:75", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69214c85-6e", "ovs_interfaceid": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2391.790606] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:19:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69214c85-6efb-469a-8f5f-fc43abc5aec1', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2391.798308] env[61573]: DEBUG oslo.service.loopingcall [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2391.798836] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2391.799079] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d0ac88a-9ef3-4f79-bbe9-474b8b3aa331 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.819812] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2391.819812] env[61573]: value = "task-4836953" [ 2391.819812] env[61573]: _type = "Task" [ 2391.819812] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2391.828322] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836953, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.331435] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836953, 'name': CreateVM_Task, 'duration_secs': 0.312944} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.331435] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2392.332015] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2392.332196] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2392.332535] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2392.332807] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9459fb83-e54e-4059-9f7e-e07d2f6cadbc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.337809] env[61573]: DEBUG oslo_vmware.api [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2392.337809] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52987145-28f3-9fa8-a856-07319948e0e7" [ 2392.337809] env[61573]: _type = "Task" [ 2392.337809] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2392.345725] env[61573]: DEBUG oslo_vmware.api [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52987145-28f3-9fa8-a856-07319948e0e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.849176] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2392.849551] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2392.849632] env[61573]: DEBUG oslo_concurrency.lockutils [None req-a8ad783b-715d-4980-9d0e-a92b812ea818 tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2393.471250] env[61573]: DEBUG nova.compute.manager [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Received event network-changed-69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2393.471426] env[61573]: DEBUG nova.compute.manager [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Refreshing instance network info cache due to event network-changed-69214c85-6efb-469a-8f5f-fc43abc5aec1. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2393.471636] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] Acquiring lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2393.471777] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] Acquired lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2393.471938] env[61573]: DEBUG nova.network.neutron [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Refreshing network info cache for port 69214c85-6efb-469a-8f5f-fc43abc5aec1 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2393.780539] env[61573]: DEBUG nova.network.neutron [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Updated VIF entry in instance network info cache for port 69214c85-6efb-469a-8f5f-fc43abc5aec1. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2393.780923] env[61573]: DEBUG nova.network.neutron [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Updating instance_info_cache with network_info: [{"id": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "address": "fa:16:3e:48:19:75", "network": {"id": "cd1def66-e622-4094-a48b-4517ee7fda0b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1144251777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90325af48fc44451a6c15e089107271a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69214c85-6e", "ovs_interfaceid": "69214c85-6efb-469a-8f5f-fc43abc5aec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2393.790877] env[61573]: DEBUG oslo_concurrency.lockutils [req-b0edcb20-7094-4c23-bc27-a9820dee7ecd req-0f92a4de-3e5d-4fe8-bfb0-01b5ea51f88f service nova] Releasing lock "refresh_cache-0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2397.231251] env[61573]: DEBUG oslo_concurrency.lockutils [None req-1c2166a9-a988-4a50-b097-481e25ed8030 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "220b6e63-be92-4ac1-9561-f2967b361eee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.482293] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.482668] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.494197] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] 
Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2406.556402] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.556691] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.558508] env[61573]: INFO nova.compute.claims [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2406.708797] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dffc93-9c3a-4256-912d-f3b251e81b97 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.717289] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edd836a-86a6-47c5-8c5f-76a16ca41968 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.751682] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e074af6-dd7d-4831-89ac-7d73ed26a515 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.759995] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588ee56b-864e-421b-8780-1acaeae8a02a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.774901] env[61573]: DEBUG nova.compute.provider_tree [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2406.783689] env[61573]: DEBUG nova.scheduler.client.report [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2406.797834] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 
tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.798350] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2406.843569] env[61573]: DEBUG nova.compute.utils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2406.844994] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2406.845314] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2406.855396] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2406.918676] env[61573]: DEBUG nova.policy [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08c59199cd604147a2f0a2cd0dc95773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e08d442d2b554ce6bd9e2cc031cf6735', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2406.924808] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2406.951037] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2406.951303] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2406.951661] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2406.951661] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2406.951809] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2406.951982] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2406.952316] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2406.952476] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2406.952640] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 
tempest-ServersTestJSON-411708961-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2406.952983] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2406.952983] env[61573]: DEBUG nova.virt.hardware [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2406.953827] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84ebb45-72cf-4562-8067-2152af06e0dc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.962596] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1378b0af-440e-4867-93f0-8e092227f760 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.273531] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Successfully created port: 3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2408.033117] env[61573]: DEBUG nova.compute.manager [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Received event network-vif-plugged-3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2408.033117] env[61573]: DEBUG oslo_concurrency.lockutils [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] Acquiring lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2408.033117] env[61573]: DEBUG oslo_concurrency.lockutils [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] Lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2408.033117] env[61573]: DEBUG oslo_concurrency.lockutils [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] Lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.033117] env[61573]: DEBUG nova.compute.manager [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] No 
waiting events found dispatching network-vif-plugged-3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2408.035192] env[61573]: WARNING nova.compute.manager [req-d0926621-8b1b-4b34-a72d-0e235bc907fa req-02f6d9c3-e132-4974-a7be-11835d53521b service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Received unexpected event network-vif-plugged-3f2546f8-5b7e-4ed4-a8ee-6063a78f60df for instance with vm_state building and task_state spawning. [ 2408.140538] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Successfully updated port: 3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2408.151226] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2408.151226] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2408.151226] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2408.210259] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2408.399721] env[61573]: DEBUG nova.network.neutron [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Updating instance_info_cache with network_info: [{"id": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "address": "fa:16:3e:01:ec:a6", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2546f8-5b", "ovs_interfaceid": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2408.427124] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2408.427587] env[61573]: DEBUG nova.compute.manager [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Instance network_info: |[{"id": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "address": "fa:16:3e:01:ec:a6", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2546f8-5b", "ovs_interfaceid": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2408.428433] env[61573]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:ec:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f2546f8-5b7e-4ed4-a8ee-6063a78f60df', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2408.436206] env[61573]: DEBUG oslo.service.loopingcall [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2408.437273] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2408.438027] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52c2aa40-946c-40e9-abcb-a0733fa7d0ab {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.458928] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2408.458928] env[61573]: value = "task-4836954" [ 2408.458928] env[61573]: _type = "Task" [ 2408.458928] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2408.469740] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836954, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2408.969659] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836954, 'name': CreateVM_Task, 'duration_secs': 0.385387} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2408.969832] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2408.977281] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2408.977462] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2408.977781] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2408.978046] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bc6e4f2-254d-4897-9440-35cc5a72c1ee {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.983101] env[61573]: DEBUG oslo_vmware.api [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2408.983101] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5263d447-7454-3bf0-abaa-b3f0d1ee5199" [ 2408.983101] env[61573]: _type = "Task" [ 2408.983101] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2408.991098] env[61573]: DEBUG oslo_vmware.api [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5263d447-7454-3bf0-abaa-b3f0d1ee5199, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2409.495270] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2409.495620] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2409.495726] env[61573]: DEBUG oslo_concurrency.lockutils [None req-e222dcb4-9d8b-4eb9-96d7-cd1503b83b59 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2410.059073] env[61573]: DEBUG nova.compute.manager [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Received event network-changed-3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2410.059290] env[61573]: DEBUG nova.compute.manager [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Refreshing instance network info cache due to event network-changed-3f2546f8-5b7e-4ed4-a8ee-6063a78f60df. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2410.059512] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] Acquiring lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2410.059714] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] Acquired lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2410.059841] env[61573]: DEBUG nova.network.neutron [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Refreshing network info cache for port 3f2546f8-5b7e-4ed4-a8ee-6063a78f60df {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2410.387015] env[61573]: DEBUG nova.network.neutron [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Updated VIF entry in instance network info cache for port 3f2546f8-5b7e-4ed4-a8ee-6063a78f60df. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2410.387435] env[61573]: DEBUG nova.network.neutron [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Updating instance_info_cache with network_info: [{"id": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "address": "fa:16:3e:01:ec:a6", "network": {"id": "11570da2-aa55-4219-bd9e-6824ecf1cb82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1536196838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08d442d2b554ce6bd9e2cc031cf6735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2546f8-5b", "ovs_interfaceid": "3f2546f8-5b7e-4ed4-a8ee-6063a78f60df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2410.396064] env[61573]: DEBUG oslo_concurrency.lockutils [req-ce52970d-2a45-4969-8e58-9d2891411579 req-e4ae74e7-b8c7-4f9c-9a81-7d0d183691df service nova] Releasing lock "refresh_cache-4b7659c4-5bbf-4f59-b858-59ff4e40036b" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2428.747969] env[61573]: WARNING oslo_vmware.rw_handles [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2428.747969] env[61573]: ERROR oslo_vmware.rw_handles [ 2428.748796] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 
tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2428.750523] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2428.750870] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Copying Virtual Disk [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/62572434-94a2-4600-8ab8-e580ac90fd99/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2428.751213] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0240b5cc-77bf-43d6-af04-42de8184a9ab {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.758897] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for the task: (returnval){ [ 2428.758897] env[61573]: value = "task-4836955" [ 2428.758897] env[61573]: _type = "Task" [ 2428.758897] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2428.767317] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Task: {'id': task-4836955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.269566] env[61573]: DEBUG oslo_vmware.exceptions [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2429.269863] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2429.270461] env[61573]: ERROR nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.270461] env[61573]: Faults: ['InvalidArgument'] [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] Traceback (most recent call last): [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] yield resources [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self.driver.spawn(context, instance, image_meta, [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self._fetch_image_if_missing(context, vi) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] image_cache(vi, tmp_image_ds_loc) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] vm_util.copy_virtual_disk( [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] session._wait_for_task(vmdk_copy_task) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return self.wait_for_task(task_ref) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return evt.wait() [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] result = hub.switch() [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return self.greenlet.switch() [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self.f(*self.args, **self.kw) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] raise exceptions.translate_fault(task_info.error) [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] Faults: ['InvalidArgument'] [ 2429.270461] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] [ 2429.271619] env[61573]: INFO nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Terminating instance [ 2429.272334] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2429.272564] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2429.272818] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-377d9803-1806-44f0-823f-4883dba30b2e {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.275101] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2429.275350] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2429.276138] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e267e9-b0c7-4a8e-8644-cde5aaa06c4e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.283302] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2429.283544] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77aeffb2-b9e1-4f3c-9fee-5d54abc384dc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.285681] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2429.285864] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2429.286854] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0931dd30-f019-49ce-9c32-eda07229a032 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.291786] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2429.291786] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d9b474-bd5f-1107-e1fb-5ae2d3f18544" [ 2429.291786] env[61573]: _type = "Task" [ 2429.291786] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.299422] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d9b474-bd5f-1107-e1fb-5ae2d3f18544, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.350621] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2429.350873] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2429.351022] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Deleting the datastore file [datastore2] c3053874-e935-40c5-ac81-268e759611f1 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2429.351337] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1c4ea21-f3d7-4b2e-856e-d071bf3efcff {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.358641] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for the task: (returnval){ [ 2429.358641] env[61573]: value = "task-4836957" [ 2429.358641] env[61573]: _type = "Task" [ 2429.358641] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.367453] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Task: {'id': task-4836957, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.802713] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2429.803164] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating directory with path [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2429.803257] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c3b9524-aee4-4346-82e9-9c6df39a5056 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.816815] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Created directory with path [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2429.817124] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Fetch image to [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2429.817365] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2429.818440] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082f97a9-1e35-4b51-884f-57ed13b1a1a0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.825840] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f47ae6-440b-4b67-bc78-56e89a533d6e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.836681] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f35084-729e-4ce3-ba0f-8158d237f8aa {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.870322] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f91ff243-e1e6-4a2e-af45-03dbb9a2528d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.880053] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-90f76d6b-f757-4fc0-9e6a-6d881ac05891 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.881362] env[61573]: DEBUG oslo_vmware.api [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Task: {'id': task-4836957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067944} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2429.881617] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2429.881798] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2429.881970] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2429.882162] env[61573]: INFO nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2429.884373] env[61573]: DEBUG nova.compute.claims [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2429.884599] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2429.884829] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2429.904163] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2429.967605] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2430.026757] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2430.027093] env[61573]: DEBUG oslo_vmware.rw_handles [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2430.103803] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91106d6a-7aa1-49e3-a852-e228125263d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.111788] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd33cd2-e127-47ec-85c2-8e9f7387095b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.143629] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d1a477-7e7f-487a-a517-96d5ccb2bd6e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.151265] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988de126-c840-482f-9b29-5407d190dd16 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.164604] env[61573]: DEBUG nova.compute.provider_tree [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2430.173536] env[61573]: DEBUG nova.scheduler.client.report [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2430.188051] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.188623] env[61573]: ERROR nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2430.188623] env[61573]: Faults: ['InvalidArgument'] [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] Traceback (most recent call last): [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2430.188623] env[61573]: ERROR nova.compute.manager 
[instance: c3053874-e935-40c5-ac81-268e759611f1] self.driver.spawn(context, instance, image_meta, [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self._fetch_image_if_missing(context, vi) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] image_cache(vi, tmp_image_ds_loc) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] vm_util.copy_virtual_disk( [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] session._wait_for_task(vmdk_copy_task) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return self.wait_for_task(task_ref) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return evt.wait() [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] result = hub.switch() [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] return self.greenlet.switch() [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] self.f(*self.args, **self.kw) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] raise exceptions.translate_fault(task_info.error) [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] Faults: ['InvalidArgument'] [ 2430.188623] env[61573]: ERROR nova.compute.manager [instance: c3053874-e935-40c5-ac81-268e759611f1] [ 2430.189481] env[61573]: DEBUG nova.compute.utils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2430.191065] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Build of instance c3053874-e935-40c5-ac81-268e759611f1 was re-scheduled: A specified parameter was not correct: fileType [ 2430.191065] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2430.191442] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2430.191616] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2430.191785] env[61573]: DEBUG nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2430.191950] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2430.685403] env[61573]: DEBUG nova.network.neutron [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2430.696590] env[61573]: INFO nova.compute.manager [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Took 0.50 seconds to deallocate network for instance. [ 2430.805746] env[61573]: INFO nova.scheduler.client.report [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Deleted allocations for instance c3053874-e935-40c5-ac81-268e759611f1 [ 2430.840656] env[61573]: DEBUG oslo_concurrency.lockutils [None req-35b2e654-63e0-486a-9b64-69b82e7c20f8 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.025s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.840969] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.331s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.841245] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Acquiring lock "c3053874-e935-40c5-ac81-268e759611f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.841498] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.841697] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.843835] env[61573]: INFO nova.compute.manager [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Terminating instance [ 2430.845500] env[61573]: DEBUG nova.compute.manager [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2430.845717] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2430.846238] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f64231a1-72af-4063-9131-bee8affc0309 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.857711] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113562c4-cf7a-4532-b9b1-dacfe1b83903 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.890743] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c3053874-e935-40c5-ac81-268e759611f1 could not be found. [ 2430.890934] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2430.891135] env[61573]: INFO nova.compute.manager [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] [instance: c3053874-e935-40c5-ac81-268e759611f1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2430.891512] env[61573]: DEBUG oslo.service.loopingcall [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2430.891654] env[61573]: DEBUG nova.compute.manager [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2430.891718] env[61573]: DEBUG nova.network.neutron [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2430.918539] env[61573]: DEBUG nova.network.neutron [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2430.927149] env[61573]: INFO nova.compute.manager [-] [instance: c3053874-e935-40c5-ac81-268e759611f1] Took 0.04 seconds to deallocate network for instance. [ 2431.028419] env[61573]: DEBUG oslo_concurrency.lockutils [None req-77dd1e09-8988-4a3d-beda-034352c56b33 tempest-ServerTagsTestJSON-1419962919 tempest-ServerTagsTestJSON-1419962919-project-member] Lock "c3053874-e935-40c5-ac81-268e759611f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.029345] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "c3053874-e935-40c5-ac81-268e759611f1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 64.313s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.029569] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: c3053874-e935-40c5-ac81-268e759611f1] During sync_power_state the instance has a pending task (deleting). Skip. [ 2431.029709] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "c3053874-e935-40c5-ac81-268e759611f1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2442.404841] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.405261] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2442.405261] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2442.423670] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.423836] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.423947] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.424089] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.424215] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.424338] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.424459] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2442.424580] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2442.425264] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.425454] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2443.406980] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.403824] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.427500] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2444.427699] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2444.427799] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2444.427985] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2444.429269] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db32cf6-144e-4690-b2fd-79ec357094a8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.439026] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021fcaff-b168-4b3e-b977-d0aad2d398db {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.453838] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b97ca5-46e5-4c9e-aff6-53788d238fe6 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.461275] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60a6ed5-f02b-4f90-ba79-357e814311de {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.493397] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180497MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2444.493556] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2444.493767] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 2aa8f536-49ba-43f8-8f36-5741e300652a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b7659c4-5bbf-4f59-b858-59ff4e40036b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2444.690023] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '90', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '5', 'num_os_type_None': '7', 'num_proj_90325af48fc44451a6c15e089107271a': '2', 'io_workload': '7', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '2', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1', 'num_task_spawning': '2'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2444.814571] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf54eaf-63df-4e60-8142-62c2bc150b16 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.823740] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f8e394-0558-4513-acad-11aef049a04e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.853716] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38d0a69-9995-4569-933b-cbd6ab7b43f7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.861403] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dd5922-a5d5-48ff-9593-74b223602065 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.873959] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2444.883982] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2444.900938] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
2444.901146] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.407s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2445.901355] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.404278] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.399636] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.399022] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.417783] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.417783] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2478.767030] env[61573]: WARNING oslo_vmware.rw_handles [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2478.767030] env[61573]: ERROR oslo_vmware.rw_handles [ 2478.767030] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2478.768846] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2478.769336] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Copying Virtual Disk [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/a329160a-7cec-4dd3-8d7d-c82860223217/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2478.769794] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d3a6ff6-2ad2-44cf-994a-7ea76da45573 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2478.778112] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2478.778112] env[61573]: value = "task-4836958" [ 2478.778112] env[61573]: _type = "Task" [ 2478.778112] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2478.788940] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836958, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2479.294022] env[61573]: DEBUG oslo_vmware.exceptions [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2479.294022] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2479.294022] env[61573]: ERROR nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2479.294022] env[61573]: Faults: ['InvalidArgument'] [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Traceback (most recent call last): [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] yield resources [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self.driver.spawn(context, instance, image_meta, [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self._fetch_image_if_missing(context, vi) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] image_cache(vi, tmp_image_ds_loc) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] vm_util.copy_virtual_disk( [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] session._wait_for_task(vmdk_copy_task) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return self.wait_for_task(task_ref) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return evt.wait() [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] result = hub.switch() [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return self.greenlet.switch() [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self.f(*self.args, **self.kw) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] raise exceptions.translate_fault(task_info.error) [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Faults: ['InvalidArgument'] [ 2479.294022] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] [ 2479.294022] env[61573]: INFO nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Terminating instance [ 2479.304024] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2479.304024] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2479.304024] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f25fb76a-fafa-4962-94ec-20a05b5f0cfc {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.304024] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2479.304024] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2479.304988] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8204fd-e356-4ece-ba16-839776877ee5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.315035] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2479.317106] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a158943-2511-40bb-a167-ce39f2d1d2f4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.319089] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2479.319518] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2479.321022] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8125285e-289d-44df-a210-74cd3fa487f8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.327944] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2479.327944] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52014df6-9601-8dd0-1ab4-8ec3ec6797b3" [ 2479.327944] env[61573]: _type = "Task" [ 2479.327944] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.345984] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2479.345984] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating directory with path [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2479.345984] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-509d5ada-7074-4084-b52b-3e027e577319 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.369546] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Created directory with path [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2479.369793] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Fetch image to [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2479.369956] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2479.371224] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51f857d-58e3-4a47-bf3b-862caabb49b3 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.379809] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0202ae-71cb-49c9-ad71-cdfc354af194 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.392319] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833d8b88-137a-4ef5-8723-148224db592c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.398363] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 
2aa8f536-49ba-43f8-8f36-5741e300652a] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2479.398664] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2479.398900] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleting the datastore file [datastore2] 2aa8f536-49ba-43f8-8f36-5741e300652a {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2479.399609] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cebe4525-f107-4f0b-a283-5b524a613b14 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.427353] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7da9a7-a7c6-4069-ad0b-745fbffd9f34 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.433150] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for the task: (returnval){ [ 2479.433150] env[61573]: value = "task-4836960" [ 2479.433150] env[61573]: _type = "Task" [ 2479.433150] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.438638] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-19d9f3ec-20ce-440e-8078-b0c2b1ba70d7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.443931] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2479.466025] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2479.541790] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2479.603574] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2479.603843] env[61573]: DEBUG oslo_vmware.rw_handles [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2479.945894] env[61573]: DEBUG oslo_vmware.api [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Task: {'id': task-4836960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07265} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2479.946274] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2479.946325] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2479.946491] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2479.946660] env[61573]: INFO nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Took 0.64 seconds to destroy the instance on the hypervisor. 
[ 2479.949090] env[61573]: DEBUG nova.compute.claims [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2479.949267] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2479.949557] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.127178] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8402daab-7dba-4eb6-8557-340a974bf5d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.134697] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bd079c-e954-438e-8e73-a6b3579b41ed {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.165344] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67c7a22-7e1d-4f0b-8fa1-1a7a83fee3c0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.174639] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8352c8b4-b247-4194-b60f-c51b3bf94213 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.191765] env[61573]: DEBUG nova.compute.provider_tree [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2480.202780] env[61573]: DEBUG nova.scheduler.client.report [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2480.220439] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de 
tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.271s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.220983] env[61573]: ERROR nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2480.220983] env[61573]: Faults: ['InvalidArgument'] [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Traceback (most recent call last): [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self.driver.spawn(context, instance, image_meta, [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self._fetch_image_if_missing(context, vi) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] image_cache(vi, tmp_image_ds_loc) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] vm_util.copy_virtual_disk( [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] session._wait_for_task(vmdk_copy_task) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return self.wait_for_task(task_ref) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return evt.wait() [ 
2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] result = hub.switch() [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] return self.greenlet.switch() [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] self.f(*self.args, **self.kw) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] raise exceptions.translate_fault(task_info.error) [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Faults: ['InvalidArgument'] [ 2480.220983] env[61573]: ERROR nova.compute.manager [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] [ 2480.221937] env[61573]: DEBUG nova.compute.utils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2480.223130] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Build of instance 2aa8f536-49ba-43f8-8f36-5741e300652a was re-scheduled: A specified parameter was not correct: fileType [ 2480.223130] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2480.223497] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2480.223670] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2480.223838] env[61573]: DEBUG nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2480.224010] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2480.810351] env[61573]: DEBUG nova.network.neutron [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.845019] env[61573]: INFO nova.compute.manager [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Took 0.62 seconds to deallocate network for instance. [ 2480.975916] env[61573]: INFO nova.scheduler.client.report [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Deleted allocations for instance 2aa8f536-49ba-43f8-8f36-5741e300652a [ 2480.999854] env[61573]: DEBUG oslo_concurrency.lockutils [None req-aad8418f-e1af-4a66-bad6-881a917d62de tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 484.056s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2481.000130] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 288.008s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.000360] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2481.000565] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.000731] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2481.002727] env[61573]: INFO nova.compute.manager [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Terminating instance [ 2481.004552] env[61573]: DEBUG nova.compute.manager [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2481.004751] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2481.005227] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a72a1b23-0a1c-4cb6-bf88-2449956f0b27 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.018386] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a407f4b7-2d3d-46d3-a017-036b14dc3b0a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.063473] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2aa8f536-49ba-43f8-8f36-5741e300652a could not be found. [ 2481.063722] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2481.063922] env[61573]: INFO nova.compute.manager [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 2481.064282] env[61573]: DEBUG oslo.service.loopingcall [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2481.064586] env[61573]: DEBUG nova.compute.manager [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2481.064703] env[61573]: DEBUG nova.network.neutron [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2481.094116] env[61573]: DEBUG nova.network.neutron [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2481.102812] env[61573]: INFO nova.compute.manager [-] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] Took 0.04 seconds to deallocate network for instance. [ 2481.207736] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d32d9c48-77e5-4b57-b82c-340c60dbdfad tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2481.208876] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 114.492s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.209091] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 2aa8f536-49ba-43f8-8f36-5741e300652a] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2481.209272] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "2aa8f536-49ba-43f8-8f36-5741e300652a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2502.407084] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2502.408077] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2502.408077] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2502.425981] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426154] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426465] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426465] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426657] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426657] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2502.426812] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2503.404223] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2503.404223] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2503.404223] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2505.404022] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2505.417557] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2505.417925] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2505.418209] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2505.418421] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2505.420029] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46248dc1-eccb-41a5-b452-2a6efe4c78f1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.429283] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b06684e-36f2-4b37-9da1-c5af77a3bcd0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.444704] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34705f5-3c57-4762-91be-08b824ba892c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.451692] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1459ea62-c254-4064-b02e-78ff55a96320 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.481346] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180516MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2505.481506] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2505.481696] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2505.542888] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance d892e1ae-e434-40b2-b86a-99c55d473363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b7659c4-5bbf-4f59-b858-59ff4e40036b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2505.544031] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '91', 'num_instances': '6', 'num_vm_building': '6', 'num_task_deleting': '4', 'num_os_type_None': '6', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '2', 'io_workload': '6', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1', 'num_task_spawning': '2', 'num_proj_90325af48fc44451a6c15e089107271a': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2505.633170] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee83070-a670-4c17-898e-8056888efca0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.641404] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774b8f33-cf3e-48b7-bb43-5772594dfd23 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.674424] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffffde95-1fd0-413f-b1e0-cfb3d895141a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.682364] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987a8e4e-0ecc-4a53-8917-c169ecefe2e7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.695892] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2505.704392] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2505.719021] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2505.719256] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.238s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2506.719948] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.400054] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.403855] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.403855] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2509.403745] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.014602] env[61573]: WARNING oslo_vmware.rw_handles [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection 
without" [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2528.014602] env[61573]: ERROR oslo_vmware.rw_handles [ 2528.014602] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2528.016752] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2528.016752] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Copying Virtual Disk [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/86200807-8e02-4611-8558-0fde9ea1c96f/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2528.017228] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-435d8563-651f-4610-9d51-3a9143917f8c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.025271] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2528.025271] env[61573]: value = "task-4836961" [ 2528.025271] env[61573]: _type = "Task" [ 2528.025271] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.034813] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.536214] env[61573]: DEBUG oslo_vmware.exceptions [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2528.536501] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2528.537048] env[61573]: ERROR nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2528.537048] env[61573]: Faults: ['InvalidArgument'] [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Traceback (most recent call last): [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] yield resources [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self.driver.spawn(context, instance, image_meta, [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self._fetch_image_if_missing(context, vi) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] image_cache(vi, tmp_image_ds_loc) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] vm_util.copy_virtual_disk( [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] session._wait_for_task(vmdk_copy_task) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return self.wait_for_task(task_ref) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return evt.wait() [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] result = hub.switch() [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return self.greenlet.switch() [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self.f(*self.args, **self.kw) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] raise exceptions.translate_fault(task_info.error) [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Faults: ['InvalidArgument'] [ 2528.537048] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] [ 2528.538206] env[61573]: INFO nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Terminating instance [ 2528.538958] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2528.539173] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2528.539407] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b167c37-bb39-4ffe-9ac5-71322c9ebc08 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.541621] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2528.541810] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2528.542542] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632c3a7f-bd9f-43d4-b410-46136291e8de {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.549760] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2528.550049] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efe80137-7692-46ae-a912-b15dca8fc5ec {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.552521] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2528.552696] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2528.553714] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80340f07-6f58-4948-aa23-403571baab2e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.558746] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for the task: (returnval){ [ 2528.558746] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d84931-97bc-57bd-f828-119de2e02893" [ 2528.558746] env[61573]: _type = "Task" [ 2528.558746] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.569235] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52d84931-97bc-57bd-f828-119de2e02893, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.628800] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2528.629087] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2528.629278] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleting the datastore file [datastore2] d892e1ae-e434-40b2-b86a-99c55d473363 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2528.629589] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd4978b7-1dee-4b9a-944a-620ddde277c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.636558] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for the task: (returnval){ [ 2528.636558] env[61573]: value = "task-4836963" [ 2528.636558] env[61573]: _type = "Task" [ 2528.636558] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.644371] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836963, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2529.069553] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2529.069911] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Creating directory with path [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2529.070067] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-812733cf-6aa6-4f65-883a-99f93c58bc96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.081772] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Created directory with path [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2529.081980] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Fetch image to [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2529.082160] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2529.082895] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fff9d9d-524e-4f3d-aa53-aaf4585d6663 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.089976] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3ad7f5-0bec-4189-bea6-64b74b95d5d1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.100376] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf0a31e-94c0-4520-978d-e7fa8aeb2350 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.130503] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fb972e77-e16c-4b77-95fd-463892fcda81 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.140196] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7e817176-10d1-4644-ac39-2d72139c2943 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.146589] env[61573]: DEBUG oslo_vmware.api [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Task: {'id': task-4836963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062415} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2529.146781] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2529.146973] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2529.147194] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2529.147373] env[61573]: INFO nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2529.150890] env[61573]: DEBUG nova.compute.claims [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2529.151096] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2529.151319] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.161168] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2529.225959] env[61573]: DEBUG oslo_vmware.rw_handles [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2529.302616] env[61573]: DEBUG oslo_vmware.rw_handles [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2529.302842] env[61573]: DEBUG oslo_vmware.rw_handles [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2529.409134] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94879e21-17bd-4572-ae2b-58ba5bce423d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.418092] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0565f49c-8e21-4bf8-8efd-bcbc6acc5235 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.450085] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2771f4-a2c5-4814-bf49-fd81dde31f64 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.457790] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa34c7d-625e-4647-928a-dc5112e27e59 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.471791] env[61573]: DEBUG nova.compute.provider_tree [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2529.480982] env[61573]: DEBUG nova.scheduler.client.report [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2529.495027] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.344s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.495819] env[61573]: ERROR nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2529.495819] env[61573]: Faults: ['InvalidArgument'] [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Traceback (most recent call last): [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: 
d892e1ae-e434-40b2-b86a-99c55d473363] self.driver.spawn(context, instance, image_meta, [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self._fetch_image_if_missing(context, vi) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] image_cache(vi, tmp_image_ds_loc) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] vm_util.copy_virtual_disk( [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] session._wait_for_task(vmdk_copy_task) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return self.wait_for_task(task_ref) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return evt.wait() [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] result = hub.switch() [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] return self.greenlet.switch() [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] self.f(*self.args, **self.kw) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] raise exceptions.translate_fault(task_info.error) [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Faults: ['InvalidArgument'] [ 2529.495819] env[61573]: ERROR nova.compute.manager [instance: d892e1ae-e434-40b2-b86a-99c55d473363] [ 2529.498488] env[61573]: DEBUG nova.compute.utils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2529.502102] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Build of instance d892e1ae-e434-40b2-b86a-99c55d473363 was re-scheduled: A specified parameter was not correct: fileType [ 2529.502102] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2529.502102] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2529.502102] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2529.502102] env[61573]: DEBUG nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2529.502102] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2529.776201] env[61573]: DEBUG nova.network.neutron [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2529.794391] env[61573]: INFO nova.compute.manager [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Took 0.29 seconds to deallocate network for instance. [ 2529.936094] env[61573]: INFO nova.scheduler.client.report [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Deleted allocations for instance d892e1ae-e434-40b2-b86a-99c55d473363 [ 2529.963884] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dec65c03-8905-455d-b2a0-b6bd5898f488 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 515.352s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.964361] env[61573]: DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 319.861s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.964652] env[61573]: DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2529.964876] env[61573]: DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.965099] env[61573]: 
DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.967488] env[61573]: INFO nova.compute.manager [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Terminating instance [ 2529.969210] env[61573]: DEBUG nova.compute.manager [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2529.969403] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2529.969924] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b078c0be-50a4-4b7f-9d13-8963dd07834b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.979564] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b23821-4eae-4b08-bc51-d4066cfee405 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.008502] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d892e1ae-e434-40b2-b86a-99c55d473363 could not be found. [ 2530.008772] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2530.008980] env[61573]: INFO nova.compute.manager [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2530.009275] env[61573]: DEBUG oslo.service.loopingcall [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2530.009534] env[61573]: DEBUG nova.compute.manager [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2530.009672] env[61573]: DEBUG nova.network.neutron [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2530.041739] env[61573]: DEBUG nova.network.neutron [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2530.052866] env[61573]: INFO nova.compute.manager [-] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] Took 0.04 seconds to deallocate network for instance. [ 2530.168492] env[61573]: DEBUG oslo_concurrency.lockutils [None req-55e41085-a74b-414f-b6b9-c88fe42e9165 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2530.169357] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 163.453s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2530.169540] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: d892e1ae-e434-40b2-b86a-99c55d473363] During sync_power_state the instance has a pending task (deleting). Skip. [ 2530.169757] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "d892e1ae-e434-40b2-b86a-99c55d473363" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2563.404373] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2563.404820] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2563.404820] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2563.422961] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Skipping network cache update for instance because it is Building. 
{{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2563.423193] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2563.423265] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2563.423393] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2563.423579] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2563.423717] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2563.424238] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2564.404624] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2564.405077] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2567.407412] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2567.407788] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2567.428559] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2567.428920] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2567.429111] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2567.429496] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2567.430893] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfbec6b-86d1-4787-b201-e4c38f91a9e9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.442722] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e407f70-263a-4956-b30e-51fd99c8796e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.461943] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6345d1f-6b67-434b-b3f2-2d8c9677ac6b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.469506] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f3e946-c0a6-45d3-9acc-10d6c4836f32 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.508439] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180501MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2567.508695] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2567.508775] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2567.583605] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2567.583764] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 07210470-d769-43e0-8d38-b076c374d203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2567.583901] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2567.584028] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2567.584155] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b7659c4-5bbf-4f59-b858-59ff4e40036b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2567.584373] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2567.584562] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '92', 'num_instances': '5', 'num_vm_building': '5', 'num_task_deleting': '3', 'num_os_type_None': '5', 'num_proj_9f5bf0addc41483c9bad3c1497811f08': '1', 'io_workload': '5', 'num_proj_f5656651931541f9b48c3e185a46a113': '1', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '1', 'num_task_spawning': '2', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2567.657314] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ba38b3-aed5-4d16-b046-6ea152b5f93d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.665843] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6856e48a-6342-484d-bdcb-c85be03433b0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.696844] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3c5af4-b2c3-42ac-907a-9da0e1a5b603 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.704643] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf914c8e-6eb8-436d-93f3-7235eb0e13d0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.719714] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2567.735517] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquiring lock "48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2567.735746] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Lock "48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2567.737663] env[61573]: DEBUG 
nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2567.753641] env[61573]: DEBUG nova.compute.manager [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Starting instance... {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2567.759797] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2567.759797] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.249s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2567.810987] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2567.811272] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2567.812701] env[61573]: INFO nova.compute.claims [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2567.943253] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9aae97-b643-44a0-938d-9b2040cdbf56 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.951360] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8956b39e-ec6c-4750-8a3f-2cd36fd45f96 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.981103] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abe525b-f17b-48c4-9138-fef65071ad57 {{(pid=61573) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.988643] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48362935-b085-4b4e-8d22-becffabc401a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.002252] env[61573]: DEBUG nova.compute.provider_tree [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2568.012305] env[61573]: DEBUG nova.scheduler.client.report [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2568.027035] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.216s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2568.027576] env[61573]: DEBUG nova.compute.manager [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2568.062478] env[61573]: DEBUG nova.compute.utils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2568.064265] env[61573]: DEBUG nova.compute.manager [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Not allocating networking since 'none' was specified. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2568.074033] env[61573]: DEBUG nova.compute.manager [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Start building block device mappings for instance. 
{{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2568.137485] env[61573]: DEBUG nova.compute.manager [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Start spawning the instance on the hypervisor. {{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2568.169053] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2568.169365] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2568.169496] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2568.169682] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2568.169831] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2568.169979] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2568.170200] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2568.170424] env[61573]: DEBUG nova.virt.hardware [None 
req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2568.170607] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2568.170772] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2568.170947] env[61573]: DEBUG nova.virt.hardware [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2568.172190] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074ce212-7683-411f-af7b-dd6b083f7721 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.180664] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59da8299-b9bc-4c77-94e4-5430be29b064 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.194967] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Instance VIF info [] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2568.200441] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Creating folder: Project (db38a8dd98da4ee081139abc95adeafa). Parent ref: group-v942801. {{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2568.200757] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94d09e6e-c271-4a9b-96fb-862a3958496d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.211317] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Created folder: Project (db38a8dd98da4ee081139abc95adeafa) in parent group-v942801. [ 2568.211532] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Creating folder: Instances. Parent ref: group-v942924. 
{{(pid=61573) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2568.211773] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a686b37c-fe03-4350-a725-b5a7669546ea {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.220064] env[61573]: INFO nova.virt.vmwareapi.vm_util [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Created folder: Instances in parent group-v942924. [ 2568.220286] env[61573]: DEBUG oslo.service.loopingcall [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2568.220510] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2568.220709] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-478d9be1-153b-4ff9-9019-1cc3ba35c6eb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.237668] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2568.237668] env[61573]: value = "task-4836966" [ 2568.237668] env[61573]: _type = "Task" [ 2568.237668] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2568.245696] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836966, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2568.748895] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2568.749275] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836966, 'name': CreateVM_Task, 'duration_secs': 0.245628} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2568.749567] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2568.749996] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2568.750228] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2568.750560] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2568.750797] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c59258-aee1-475b-8c8a-b3969a239342 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.755439] env[61573]: DEBUG oslo_vmware.api [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Waiting for the task: (returnval){ [ 2568.755439] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52800d99-9f00-73b4-ef3c-ae10dc02d3f7" [ 2568.755439] env[61573]: _type = "Task" [ 2568.755439] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2568.763538] env[61573]: DEBUG oslo_vmware.api [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52800d99-9f00-73b4-ef3c-ae10dc02d3f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2569.266529] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2569.266800] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2569.267025] env[61573]: DEBUG oslo_concurrency.lockutils [None req-016465e7-a724-404b-b368-8444d6b970cd tempest-ServerShowV257Test-270489710 tempest-ServerShowV257Test-270489710-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2569.403821] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2569.404034] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.399575] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.421581] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2575.154407] env[61573]: WARNING oslo_vmware.rw_handles [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles 
version, status, reason = self._read_status() [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2575.154407] env[61573]: ERROR oslo_vmware.rw_handles [ 2575.155221] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2575.156527] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2575.156944] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Copying Virtual Disk [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/775c7a93-ef55-4829-8d6c-39a0113f4d73/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2575.157062] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d8478b2-0bab-4ecf-ae0e-ed6241eadbb2 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.165505] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for the task: (returnval){ [ 2575.165505] env[61573]: value = "task-4836967" [ 2575.165505] env[61573]: _type = "Task" [ 2575.165505] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2575.174528] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Task: {'id': task-4836967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2575.676324] env[61573]: DEBUG oslo_vmware.exceptions [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Fault InvalidArgument not matched. 
{{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2575.676617] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2575.677203] env[61573]: ERROR nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2575.677203] env[61573]: Faults: ['InvalidArgument'] [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Traceback (most recent call last): [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] yield resources [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self.driver.spawn(context, instance, image_meta, [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self._fetch_image_if_missing(context, vi) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] image_cache(vi, tmp_image_ds_loc) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] vm_util.copy_virtual_disk( [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] session._wait_for_task(vmdk_copy_task) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return self.wait_for_task(task_ref) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return evt.wait() [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] result = hub.switch() [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return self.greenlet.switch() [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self.f(*self.args, **self.kw) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] raise exceptions.translate_fault(task_info.error) [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Faults: ['InvalidArgument'] [ 2575.677203] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] [ 2575.678266] env[61573]: INFO nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Terminating instance [ 2575.679128] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2575.679340] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2575.679580] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e3dd4c4-063a-4c2f-8fa8-244f96473a2e 
{{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.682064] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2575.682265] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2575.683019] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2be8576-87d3-456a-a61a-1fcd870f05a7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.690058] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2575.690334] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01bdb300-34a4-49a3-b281-07c055d16046 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.692760] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2575.692889] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2575.693911] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4473887f-13e3-469f-b3f3-211120e07a53 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.700664] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for the task: (returnval){ [ 2575.700664] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5273423e-30b6-4c55-2f58-bfdd6fac8bb7" [ 2575.700664] env[61573]: _type = "Task" [ 2575.700664] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2575.709663] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]5273423e-30b6-4c55-2f58-bfdd6fac8bb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2575.760753] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2575.761012] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2575.761211] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Deleting the datastore file [datastore2] 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2575.761517] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a47dd23f-7db0-439e-bffa-5c453151f0eb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.768689] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for the task: (returnval){ [ 2575.768689] env[61573]: value = "task-4836969" [ 2575.768689] env[61573]: _type = "Task" [ 2575.768689] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2575.776949] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Task: {'id': task-4836969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2576.210592] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2576.210940] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Creating directory with path [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2576.211171] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8615501c-ad20-4a8d-b3bb-703eeb6ba82a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.222588] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Created directory with path [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2576.222807] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Fetch image to [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2576.222982] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2576.223819] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1459d3b8-e5b9-449b-8acb-aaa0b1ad2d38 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.230877] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00bfa0b-5ddb-42cc-b2fc-756ecd4ade23 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.240497] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8572bdda-7eb2-4b8c-bd03-f6dc9200f14c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.274804] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d4b40eb-f8dd-42d0-8db0-a47cda0b7b20 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.284194] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-49659ca5-7dc5-49bc-bb39-591d9dc88b56 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.285933] env[61573]: DEBUG oslo_vmware.api [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Task: {'id': task-4836969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081565} completed successfully. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2576.286196] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2576.286383] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2576.286551] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2576.286739] env[61573]: INFO nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Took 0.60 seconds to destroy the instance on the hypervisor. 
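The wait_for_task / _poll_task entries above (CreateVM_Task, DeleteDatastoreFile_Task) follow a simple poll-until-terminal pattern: the task reference returned by the server is polled on an interval, progress is logged, and an exception is raised if the task ends in an error state. A minimal, hypothetical sketch of that pattern is below; the names and the get_task_info callable are illustrative only, not the actual nova or oslo.vmware API.

    import time

    class TaskFaultError(Exception):
        """Raised when the remote task finishes in an error state."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll the task until it reaches a terminal state, mirroring the
        # "progress is 0%" ... "completed successfully" entries in the log.
        while True:
            info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFaultError(info.get('error', 'task failed'))
            time.sleep(poll_interval)

In the traces above, the error branch of this loop is what surfaces the VimFaultException ("A specified parameter was not correct: fileType") back into _build_and_run_instance.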
[ 2576.288829] env[61573]: DEBUG nova.compute.claims [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2576.289048] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2576.289257] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2576.308412] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2576.367814] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2576.427602] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2576.427842] env[61573]: DEBUG oslo_vmware.rw_handles [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2576.482032] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f15691-1c31-4931-a9c6-04d12e0f28c1 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.489955] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa47417b-c866-46d0-9cdc-e64505f984d4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.523291] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d185ca1-6640-4a91-9bd8-f856fbd95c83 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.531106] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5893e514-b72f-491e-ab16-d5f49c56075a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.545186] env[61573]: DEBUG nova.compute.provider_tree [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2576.553858] env[61573]: DEBUG nova.scheduler.client.report [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2576.569649] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2576.570201] env[61573]: ERROR nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2576.570201] env[61573]: Faults: ['InvalidArgument'] [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Traceback (most recent call last): [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2576.570201] 
env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self.driver.spawn(context, instance, image_meta, [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self._fetch_image_if_missing(context, vi) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] image_cache(vi, tmp_image_ds_loc) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] vm_util.copy_virtual_disk( [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] session._wait_for_task(vmdk_copy_task) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return self.wait_for_task(task_ref) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return evt.wait() [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] result = hub.switch() [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] return self.greenlet.switch() [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] self.f(*self.args, **self.kw) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] raise exceptions.translate_fault(task_info.error) [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Faults: ['InvalidArgument'] [ 2576.570201] env[61573]: ERROR nova.compute.manager [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] [ 2576.571314] env[61573]: DEBUG nova.compute.utils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2576.572465] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Build of instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 was re-scheduled: A specified parameter was not correct: fileType [ 2576.572465] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2576.572846] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2576.573031] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2576.573208] env[61573]: DEBUG nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2576.573407] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2576.886785] env[61573]: DEBUG nova.network.neutron [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2576.904750] env[61573]: INFO nova.compute.manager [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Took 0.33 seconds to deallocate network for instance. [ 2576.999492] env[61573]: INFO nova.scheduler.client.report [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Deleted allocations for instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 [ 2577.024174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-03489678-f201-4f27-8c1f-565c7a461153 tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 408.914s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2577.024174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 213.094s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2577.024174] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Acquiring lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2577.024497] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2577.024497] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2577.027020] env[61573]: INFO nova.compute.manager [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Terminating instance [ 2577.028871] env[61573]: DEBUG nova.compute.manager [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Start destroying the instance on the hypervisor. {{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2577.029092] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2577.029639] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7fa87b0-e4d9-4751-9de7-20294d9e4058 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.040903] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5575893f-cffe-4a3f-83c9-eab1c3f7ff3a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.072786] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 427e30fb-7af2-4ecc-934a-bb2b8d2cb320 could not be found. [ 2577.073053] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2577.073244] env[61573]: INFO nova.compute.manager [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2577.073498] env[61573]: DEBUG oslo.service.loopingcall [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2577.073783] env[61573]: DEBUG nova.compute.manager [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2577.073877] env[61573]: DEBUG nova.network.neutron [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2577.104502] env[61573]: DEBUG nova.network.neutron [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2577.113409] env[61573]: INFO nova.compute.manager [-] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] Took 0.04 seconds to deallocate network for instance. [ 2577.223908] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d05c28cf-80b9-45ad-b685-1aa563a20ffd tempest-ServerActionsTestOtherB-1943516402 tempest-ServerActionsTestOtherB-1943516402-project-member] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2577.225328] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 210.508s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2577.225648] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 427e30fb-7af2-4ecc-934a-bb2b8d2cb320] During sync_power_state the instance has a pending task (deleting). Skip. 
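The oslo_concurrency.lockutils entries in this stretch record, for each caller, how long it waited to acquire a named lock (such as "compute_resources" or the per-instance lock) and how long it then held it. A minimal, hypothetical sketch of that timing pattern follows; threading.Lock and the print format are illustrative, the real oslo_concurrency implementation differs.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str, caller: str):
        # Record how long we waited to acquire and how long we held the lock,
        # mirroring the "waited X.XXXs" / "held X.XXXs" entries in the log.
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')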
[ 2577.225883] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "427e30fb-7af2-4ecc-934a-bb2b8d2cb320" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2585.395961] env[61573]: DEBUG oslo_concurrency.lockutils [None req-dc88cee3-1476-4a35-9cbf-99804a75a2bf tempest-ServerDiskConfigTestJSON-1776322601 tempest-ServerDiskConfigTestJSON-1776322601-project-member] Acquiring lock "0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2595.601474] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "7bae696f-64f7-426c-a0dd-d65ea7b58f4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2595.601905] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "7bae696f-64f7-426c-a0dd-d65ea7b58f4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2595.616366] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Starting instance... 
{{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2595.693201] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2595.693518] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2595.695210] env[61573]: INFO nova.compute.claims [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2595.824341] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9518bf9d-ca3e-47e0-abc6-bed944eb7a2e {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.832143] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51de2d0d-d6ad-46c9-b9a9-76ff11589269 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.864552] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4af97fe-109f-4645-b938-6c2a8cf98c6c {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.872767] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce974f2-f18a-447b-9869-2283406ea8d5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.886973] env[61573]: DEBUG nova.compute.provider_tree [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2595.895926] env[61573]: DEBUG nova.scheduler.client.report [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2595.909992] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.216s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2595.910492] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Start building networks asynchronously for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2595.944508] env[61573]: DEBUG nova.compute.utils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Using /dev/sd instead of None {{(pid=61573) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2595.946123] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Allocating IP information in the background. {{(pid=61573) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2595.946313] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] allocate_for_instance() {{(pid=61573) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2595.955861] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Start building block device mappings for instance. {{(pid=61573) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2596.003144] env[61573]: DEBUG nova.policy [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '468062a6200749a886894f41bf8063de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d80b82b526a4ca9b79db7d72896c393', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61573) authorize /opt/stack/nova/nova/policy.py:203}} [ 2596.023445] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Start spawning the instance on the hypervisor. 
{{(pid=61573) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2596.048281] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T08:20:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T08:20:43Z,direct_url=,disk_format='vmdk',id=896f953b-9c5a-4a3b-82f8-e48debb67b3a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b6b1daa7c889493c969d61d1a6ca8f52',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T08:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2596.048541] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Flavor limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2596.048721] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Image limits 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2596.048917] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Flavor pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2596.049077] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Image pref 0:0:0 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2596.049227] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61573) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2596.049429] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2596.049589] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2596.049756] 
env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Got 1 possible topologies {{(pid=61573) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2596.049914] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2596.050143] env[61573]: DEBUG nova.virt.hardware [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61573) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2596.051015] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9191670-6240-442d-892a-d93f1e521e3a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.058822] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e133f84-3d8a-4b92-818b-4dacd7cb6127 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.423772] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Successfully created port: a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2597.031838] env[61573]: DEBUG nova.compute.manager [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Received event network-vif-plugged-a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2597.032161] env[61573]: DEBUG oslo_concurrency.lockutils [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] Acquiring lock "7bae696f-64f7-426c-a0dd-d65ea7b58f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2597.032301] env[61573]: DEBUG oslo_concurrency.lockutils [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] Lock "7bae696f-64f7-426c-a0dd-d65ea7b58f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2597.032470] env[61573]: DEBUG oslo_concurrency.lockutils [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] Lock "7bae696f-64f7-426c-a0dd-d65ea7b58f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2597.033343] env[61573]: DEBUG nova.compute.manager [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] No waiting events found dispatching network-vif-plugged-a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2597.033555] env[61573]: WARNING nova.compute.manager [req-5be653b7-8291-419a-8e04-1bc9b5b661e0 req-1d18e121-3eb4-42c7-8954-bc21b907050a service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Received unexpected event network-vif-plugged-a9357ecb-3386-4172-a6f4-8a48b7f76373 for instance with vm_state building and task_state spawning. [ 2597.134358] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Successfully updated port: a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2597.151170] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2597.151312] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2597.151461] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2597.203319] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Instance cache missing network info. 
{{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2597.410432] env[61573]: DEBUG nova.network.neutron [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Updating instance_info_cache with network_info: [{"id": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "address": "fa:16:3e:c5:e3:08", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9357ecb-33", "ovs_interfaceid": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2597.425672] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Releasing lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2597.425968] env[61573]: DEBUG nova.compute.manager [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Instance network_info: |[{"id": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "address": "fa:16:3e:c5:e3:08", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9357ecb-33", "ovs_interfaceid": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61573) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2597.426381] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:e3:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9357ecb-3386-4172-a6f4-8a48b7f76373', 'vif_model': 'vmxnet3'}] {{(pid=61573) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2597.434254] env[61573]: DEBUG oslo.service.loopingcall [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2597.434741] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Creating VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2597.434969] env[61573]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d29ba9e-1e48-4b55-9a92-34f7e2bc483b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2597.455740] env[61573]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2597.455740] env[61573]: value = "task-4836970" [ 2597.455740] env[61573]: _type = "Task" [ 2597.455740] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2597.463782] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836970, 'name': CreateVM_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2597.967064] env[61573]: DEBUG oslo_vmware.api [-] Task: {'id': task-4836970, 'name': CreateVM_Task, 'duration_secs': 0.333459} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2597.967064] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Created VM on the ESX host {{(pid=61573) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2597.967533] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2597.967705] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2597.968034] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2597.968300] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04a1cf2c-dbf0-4e26-a6f8-a4dd77ee4c7d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2597.973208] env[61573]: DEBUG oslo_vmware.api [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Waiting for the task: (returnval){ [ 2597.973208] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52206549-e062-d6bb-74ce-a9a7b4ca176b" [ 2597.973208] env[61573]: _type = "Task" [ 2597.973208] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2597.981396] env[61573]: DEBUG oslo_vmware.api [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52206549-e062-d6bb-74ce-a9a7b4ca176b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2598.484973] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2598.485361] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Processing image 896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2598.485502] env[61573]: DEBUG oslo_concurrency.lockutils [None req-f8c9c441-b8bf-4464-9b52-6918499abde4 tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2599.065564] env[61573]: DEBUG nova.compute.manager [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Received event network-changed-a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11198}} [ 2599.065802] env[61573]: DEBUG nova.compute.manager [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Refreshing instance network info cache due to event network-changed-a9357ecb-3386-4172-a6f4-8a48b7f76373. {{(pid=61573) external_instance_event /opt/stack/nova/nova/compute/manager.py:11203}} [ 2599.065976] env[61573]: DEBUG oslo_concurrency.lockutils [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] Acquiring lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2599.066137] env[61573]: DEBUG oslo_concurrency.lockutils [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] Acquired lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2599.066315] env[61573]: DEBUG nova.network.neutron [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Refreshing network info cache for port a9357ecb-3386-4172-a6f4-8a48b7f76373 {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2599.323366] env[61573]: DEBUG nova.network.neutron [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Updated VIF entry in instance network info cache for port a9357ecb-3386-4172-a6f4-8a48b7f76373. 
{{(pid=61573) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2599.323720] env[61573]: DEBUG nova.network.neutron [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Updating instance_info_cache with network_info: [{"id": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "address": "fa:16:3e:c5:e3:08", "network": {"id": "d1e7930d-0900-44bf-b3a0-3e89d3ab1cc9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1616538157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d80b82b526a4ca9b79db7d72896c393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9357ecb-33", "ovs_interfaceid": "a9357ecb-3386-4172-a6f4-8a48b7f76373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2599.334221] env[61573]: DEBUG oslo_concurrency.lockutils [req-3fb98445-0ede-4b4e-8cc2-2e8cf3b12e5f req-63187c54-a6f3-4c1e-87fa-8f413e288a92 service nova] Releasing lock "refresh_cache-7bae696f-64f7-426c-a0dd-d65ea7b58f4f" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2602.424548] env[61573]: DEBUG oslo_concurrency.lockutils [None req-bbdeb62a-f9d7-4df7-afbe-76b68d471355 tempest-ServersTestJSON-411708961 tempest-ServersTestJSON-411708961-project-member] Acquiring lock "4b7659c4-5bbf-4f59-b858-59ff4e40036b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2616.404471] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2616.404858] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11295}} [ 2616.416172] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] There are 0 instances to clean {{(pid=61573) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 2621.698232] env[61573]: WARNING oslo_vmware.rw_handles [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 
2621.698232] env[61573]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles response.begin() [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2621.698232] env[61573]: ERROR oslo_vmware.rw_handles [ 2621.698872] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Downloaded image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2621.700823] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Caching image {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2621.701129] env[61573]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Copying Virtual Disk [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk to [datastore2] vmware_temp/76da4a80-41b2-43b8-b32b-cce1cf72237c/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk {{(pid=61573) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2621.701440] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b3a51e7-735a-4d24-a539-b4d4fdadddf0 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.710013] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for the task: (returnval){ [ 2621.710013] env[61573]: value = "task-4836971" [ 2621.710013] env[61573]: _type = "Task" [ 2621.710013] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2621.718908] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Task: {'id': task-4836971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.221184] env[61573]: DEBUG oslo_vmware.exceptions [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Fault InvalidArgument not matched. {{(pid=61573) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2622.221497] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Releasing lock "[datastore2] devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2622.222082] env[61573]: ERROR nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2622.222082] env[61573]: Faults: ['InvalidArgument'] [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] Traceback (most recent call last): [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] yield resources [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self.driver.spawn(context, instance, image_meta, [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self._fetch_image_if_missing(context, vi) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] image_cache(vi, tmp_image_ds_loc) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 
07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] vm_util.copy_virtual_disk( [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] session._wait_for_task(vmdk_copy_task) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] return self.wait_for_task(task_ref) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] return evt.wait() [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] result = hub.switch() [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] return self.greenlet.switch() [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self.f(*self.args, **self.kw) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] raise exceptions.translate_fault(task_info.error) [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] Faults: ['InvalidArgument'] [ 2622.222082] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] [ 2622.223025] env[61573]: INFO nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Terminating instance [ 2622.223990] env[61573]: DEBUG oslo_concurrency.lockutils [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/896f953b-9c5a-4a3b-82f8-e48debb67b3a/896f953b-9c5a-4a3b-82f8-e48debb67b3a.vmdk" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2622.224233] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2622.224485] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da41c47e-a39f-4b22-aae4-23e5209c6b66 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.226730] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2622.226892] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2622.227100] env[61573]: DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2622.235269] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2622.235455] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61573) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2622.236780] env[61573]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b9e22f-fb4d-4929-ab28-8cfa136aff40 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.245269] env[61573]: DEBUG oslo_vmware.api [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Waiting for the task: (returnval){ [ 2622.245269] env[61573]: value = "session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52137f96-4095-9185-dfb5-bf76e4ffe0bf" [ 2622.245269] env[61573]: _type = "Task" [ 2622.245269] env[61573]: } to complete. 
{{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2622.253644] env[61573]: DEBUG oslo_vmware.api [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Task: {'id': session[52ade82c-3ebf-f795-08d4-3cb1a4f2c4fc]52137f96-4095-9185-dfb5-bf76e4ffe0bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.269283] env[61573]: DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2622.364637] env[61573]: DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2622.374915] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Releasing lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2622.375450] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2622.375660] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2622.376804] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79fea0a-2704-47fd-9e1b-f1adaf9d7678 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.385332] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Unregistering the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2622.385581] env[61573]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ec35786-88b1-48e5-8823-cd5261a2433a {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.418679] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Unregistered the VM {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2622.419029] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Deleting contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2622.419365] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Deleting the datastore file [datastore2] 07210470-d769-43e0-8d38-b076c374d203 {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2622.419760] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74b0d06d-3ed8-4318-8f92-77f04efae921 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.428009] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for the task: (returnval){ [ 2622.428009] env[61573]: value = "task-4836973" [ 2622.428009] env[61573]: _type = "Task" [ 2622.428009] env[61573]: } to complete. {{(pid=61573) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2622.440444] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Task: {'id': task-4836973, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.755770] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Preparing fetch location {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2622.756185] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Creating directory with path [datastore2] vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2622.756311] env[61573]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b06ca3b5-3297-43e2-b761-1f03aa6c5590 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.768476] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Created directory with path [datastore2] vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a {{(pid=61573) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2622.768724] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Fetch image to [datastore2] vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk {{(pid=61573) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2622.768933] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to [datastore2] vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61573) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2622.769667] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4425675-e01a-4972-9a5c-7dbedb5152d9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.776776] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b90557-f095-4b83-bea1-c8c655fbfca5 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.785941] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a8a4f7-f833-43b6-ae34-7865e88bdcbc {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.817642] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-28879d43-978d-4a18-a408-e5c4f1913643 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.824495] env[61573]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-92171d5f-2c1a-4fee-8382-71d703ca03b9 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.844837] env[61573]: DEBUG nova.virt.vmwareapi.images [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Downloading image file data 896f953b-9c5a-4a3b-82f8-e48debb67b3a to the data store datastore2 {{(pid=61573) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2622.897277] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2622.955751] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Completed reading data from the image iterator. {{(pid=61573) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2622.956037] env[61573]: DEBUG oslo_vmware.rw_handles [None req-9fb983d5-7bce-4d40-9bd3-dcc9f6b188ed tempest-AttachVolumeNegativeTest-1735044359 tempest-AttachVolumeNegativeTest-1735044359-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/406c91e6-8496-40ec-b858-3476beb45b48/896f953b-9c5a-4a3b-82f8-e48debb67b3a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61573) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2622.960180] env[61573]: DEBUG oslo_vmware.api [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Task: {'id': task-4836973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043468} completed successfully. 
{{(pid=61573) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2622.960452] env[61573]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Deleted the datastore file {{(pid=61573) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2622.960641] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Deleted contents of the VM from datastore datastore2 {{(pid=61573) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2622.960833] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2622.961055] env[61573]: INFO nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Took 0.59 seconds to destroy the instance on the hypervisor. [ 2622.961336] env[61573]: DEBUG oslo.service.loopingcall [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2622.961547] env[61573]: DEBUG nova.compute.manager [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2622.963829] env[61573]: DEBUG nova.compute.claims [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Aborting claim: {{(pid=61573) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2622.963998] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2622.964231] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2623.058108] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Refreshing inventories for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2623.074664] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Updating ProviderTree inventory for provider b1eff98b-2b30-4574-a87d-d151235a2dba from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2623.074664] env[61573]: DEBUG nova.compute.provider_tree [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Updating inventory in ProviderTree for provider b1eff98b-2b30-4574-a87d-d151235a2dba with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2623.088860] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Refreshing aggregate associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, aggregates: None {{(pid=61573) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2623.117127] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Refreshing trait associations for resource provider b1eff98b-2b30-4574-a87d-d151235a2dba, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=61573) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2623.207549] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6971112f-16d8-457f-8c5a-fd28c43e612f {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.215401] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ba7e02-1b15-4a83-ba64-c840e1da1c03 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.246146] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e34ed4-849e-44da-a4b4-9864bf6685a7 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.254097] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9eb3a4a-01b4-4664-be32-ce49dfc9b637 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.267841] env[61573]: DEBUG nova.compute.provider_tree [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2623.278305] env[61573]: DEBUG nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2623.294281] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2623.294823] env[61573]: ERROR nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Failed to build and run instance: 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2623.294823] env[61573]: Faults: ['InvalidArgument'] [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] Traceback (most recent call last): [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self.driver.spawn(context, instance, image_meta, [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self._fetch_image_if_missing(context, vi) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] image_cache(vi, tmp_image_ds_loc) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] vm_util.copy_virtual_disk( [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] session._wait_for_task(vmdk_copy_task) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] return self.wait_for_task(task_ref) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] return evt.wait() [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] result = hub.switch() [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] 
return self.greenlet.switch() [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] self.f(*self.args, **self.kw) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] raise exceptions.translate_fault(task_info.error) [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] Faults: ['InvalidArgument'] [ 2623.294823] env[61573]: ERROR nova.compute.manager [instance: 07210470-d769-43e0-8d38-b076c374d203] [ 2623.295782] env[61573]: DEBUG nova.compute.utils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] VimFaultException {{(pid=61573) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2623.297039] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Build of instance 07210470-d769-43e0-8d38-b076c374d203 was re-scheduled: A specified parameter was not correct: fileType [ 2623.297039] env[61573]: Faults: ['InvalidArgument'] {{(pid=61573) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2623.297418] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Unplugging VIFs for instance {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2623.297640] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2623.297786] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2623.297945] env[61573]: DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2623.323313] env[61573]: 
DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2623.385253] env[61573]: DEBUG nova.network.neutron [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2623.394924] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Releasing lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2623.395165] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61573) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2623.395348] env[61573]: DEBUG nova.compute.manager [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2623.415695] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2623.500673] env[61573]: INFO nova.scheduler.client.report [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Deleted allocations for instance 07210470-d769-43e0-8d38-b076c374d203 [ 2623.526732] env[61573]: DEBUG oslo_concurrency.lockutils [None req-d6fa8dc4-bcc0-42ac-89ef-7d951212305a tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 449.125s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2623.526954] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "07210470-d769-43e0-8d38-b076c374d203" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 256.810s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2623.527163] env[61573]: INFO nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 07210470-d769-43e0-8d38-b076c374d203] During sync_power_state the instance has a pending task (spawning). Skip. [ 2623.527341] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "07210470-d769-43e0-8d38-b076c374d203" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2623.527846] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 252.987s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2623.528077] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "07210470-d769-43e0-8d38-b076c374d203-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2623.528290] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61573) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2623.528453] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2623.531023] env[61573]: INFO nova.compute.manager [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Terminating instance [ 2623.532768] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquiring lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2623.532916] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Acquired lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2623.533112] env[61573]: DEBUG nova.network.neutron [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Building network info cache for instance {{(pid=61573) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2623.570629] env[61573]: DEBUG nova.network.neutron [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2623.687413] env[61573]: DEBUG nova.network.neutron [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2623.699319] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Releasing lock "refresh_cache-07210470-d769-43e0-8d38-b076c374d203" {{(pid=61573) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2623.699659] env[61573]: DEBUG nova.compute.manager [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Start destroying the instance on the hypervisor. 
{{(pid=61573) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2623.700272] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Destroying instance {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2623.700451] env[61573]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-172be341-fb4b-45da-a47d-02093f48b57b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.710494] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30a7399-4098-4589-a0c0-3fff15c331fb {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.739926] env[61573]: WARNING nova.virt.vmwareapi.vmops [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07210470-d769-43e0-8d38-b076c374d203 could not be found. [ 2623.740155] env[61573]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance destroyed {{(pid=61573) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2623.740337] env[61573]: INFO nova.compute.manager [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] [instance: 07210470-d769-43e0-8d38-b076c374d203] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2623.740579] env[61573]: DEBUG oslo.service.loopingcall [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61573) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2623.740806] env[61573]: DEBUG nova.compute.manager [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Deallocating network for instance {{(pid=61573) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2623.740903] env[61573]: DEBUG nova.network.neutron [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] deallocate_for_instance() {{(pid=61573) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2623.758878] env[61573]: DEBUG nova.network.neutron [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Instance cache missing network info. {{(pid=61573) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2623.767110] env[61573]: DEBUG nova.network.neutron [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Updating instance_info_cache with network_info: [] {{(pid=61573) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2623.776314] env[61573]: INFO nova.compute.manager [-] [instance: 07210470-d769-43e0-8d38-b076c374d203] Took 0.04 seconds to deallocate network for instance. 
[ 2623.864698] env[61573]: DEBUG oslo_concurrency.lockutils [None req-6eeeca21-13f1-4321-a1b3-fd7208b35b55 tempest-ServersListShow296Test-1549879912 tempest-ServersListShow296Test-1549879912-project-member] Lock "07210470-d769-43e0-8d38-b076c374d203" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.337s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2625.404482] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2625.404872] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Starting heal instance info cache {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9995}} [ 2625.404872] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Rebuilding the list of instances to heal {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 2625.420145] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 220b6e63-be92-4ac1-9561-f2967b361eee] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2625.420308] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2625.420409] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 4b7659c4-5bbf-4f59-b858-59ff4e40036b] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2625.420536] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2625.420660] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] [instance: 7bae696f-64f7-426c-a0dd-d65ea7b58f4f] Skipping network cache update for instance because it is Building. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 2625.420779] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Didn't find any instances for network info cache update. {{(pid=61573) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10081}} [ 2625.421435] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2625.421581] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61573) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10614}} [ 2625.421756] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2625.421881] env[61573]: DEBUG nova.compute.manager [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Cleaning up deleted instances with incomplete migration {{(pid=61573) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11333}} [ 2628.427492] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.404016] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.404306] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.416568] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2629.416785] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2629.416951] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2629.417119] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61573) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2629.418348] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817faa38-3e6e-4604-97cf-7a9c6fa75b44 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.427579] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69fae2ca-7e69-49be-bba9-a79ef5c298b4 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.442217] env[61573]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58c8235-7ad5-4324-8699-eacf85da1b2b {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.448991] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8e7a21-680b-48e2-8f3e-cf52109d7586 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.477573] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180482MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61573) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2629.477760] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2629.477864] env[61573]: DEBUG oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2629.535029] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 220b6e63-be92-4ac1-9561-f2967b361eee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2629.535338] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 0ebe9ee0-2e37-4fba-b1c6-8ebc9b235dbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2629.535465] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 4b7659c4-5bbf-4f59-b858-59ff4e40036b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2629.535584] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 48a7b4a7-4a0c-4528-bffa-4e6d8861b6aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2629.535699] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Instance 7bae696f-64f7-426c-a0dd-d65ea7b58f4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61573) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2629.535881] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2629.536033] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '94', 'num_instances': '5', 'num_vm_building': '5', 'num_task_deleting': '3', 'num_os_type_None': '5', 'num_proj_9d80b82b526a4ca9b79db7d72896c393': '2', 'io_workload': '5', 'num_proj_90325af48fc44451a6c15e089107271a': '1', 'num_proj_e08d442d2b554ce6bd9e2cc031cf6735': '1', 'num_task_spawning': '2', 'num_proj_db38a8dd98da4ee081139abc95adeafa': '1'} {{(pid=61573) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2629.610868] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a824ab02-bdfc-4e50-afbc-4c83d74410ca {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.620479] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb05160-ae8a-4653-a390-4c663b6cc7f8 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.650486] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7044933-0081-4def-993e-d44c06a2409d {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.658158] env[61573]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2dc5e5-2e48-4cd5-8c41-3aae0c344819 {{(pid=61573) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2629.671573] env[61573]: DEBUG nova.compute.provider_tree [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed in ProviderTree for provider: b1eff98b-2b30-4574-a87d-d151235a2dba {{(pid=61573) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2629.680520] env[61573]: DEBUG nova.scheduler.client.report [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Inventory has not changed for provider b1eff98b-2b30-4574-a87d-d151235a2dba based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61573) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2629.694046] env[61573]: DEBUG nova.compute.resource_tracker [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61573) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2629.694236] env[61573]: DEBUG 
oslo_concurrency.lockutils [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.216s {{(pid=61573) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2630.693902] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2631.404638] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2631.404902] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2650.404365] env[61573]: DEBUG oslo_service.periodic_task [None req-24bc361e-e9d4-4eaa-8168-64474df69e64 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61573) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}